diff --git a/EXTERNAL_MODEL_RESULTS.json b/EXTERNAL_MODEL_RESULTS.json index 0c27b42a0cdb771f23bea13a2f73984805de4a95..43361987968acc458106d6e0c288c9b5eaef9df6 100644 --- a/EXTERNAL_MODEL_RESULTS.json +++ b/EXTERNAL_MODEL_RESULTS.json @@ -1,3841 +1,2329 @@ { - "monot5-3b-msmarco-10k": { + "LaBSE-ru-turbo": { "BitextMining": { "f1": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "Tatoeba (rus-Cyrl_eng-Latn)": 93.22 } ] }, "Classification": { "accuracy": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "GeoreviewClassification (rus-Cyrl)": 46.04, + "HeadlineClassification (rus-Cyrl)": 69.98, + "InappropriatenessClassification (rus-Cyrl)": 61.39, + "KinopoiskClassification (rus-Cyrl)": 53.59, + "MassiveIntentClassification (rus-Cyrl)": 66.08, + "MassiveScenarioClassification (rus-Cyrl)": 71.13, + "RuReviewsClassification (rus-Cyrl)": 64.58, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.67, + "RuSciBenchOECDClassification (rus-Cyrl)": 43.58 } ] }, "Clustering": { "v_measure": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "GeoreviewClusteringP2P (rus-Cyrl)": 64.55, + "MLSUMClusteringP2P (rus-Cyrl)": 45.7, + "MLSUMClusteringS2S (rus-Cyrl)": 42.93, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.64, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.48 } ] }, "PairClassification": { "ap": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "OpusparcusPC (rus-Cyrl)": 89.32, + "TERRa (rus-Cyrl)": 57.81 } ] }, "Reranking": { "map": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "RuBQReranking (rus-Cyrl)": 68.65 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "RiaNewsRetrieval (rus-Cyrl)": 69.36, + "RuBQRetrieval (rus-Cyrl)": 65.71 } ] }, "STS": { "spearman": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo", + "RUParaPhraserSTS (rus-Cyrl)": 72.97, + "RuSTSBenchmarkSTS (rus-Cyrl)": 81.77, + "STS22 (rus-Cyrl)": 62.89, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81 } ] }, "Summarization": { "spearman": [ { - "Model": "monot5-3b-msmarco-10k" + "Model": "LaBSE-ru-turbo" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "monot5-3b-msmarco-10k", - "Core17InstructionRetrieval": 1.84, - "News21InstructionRetrieval": 1.78, - "Robust04InstructionRetrieval": 3.96 + "Model": "LaBSE-ru-turbo" } ] } }, - "LLM2Vec-Mistral-unsupervised": { + "all-mpnet-base-v2": { "BitextMining": { "f1": [ { - "Model": "LLM2Vec-Mistral-unsupervised" + "Model": "all-mpnet-base-v2", + "BornholmBitextMining (dan-Latn)": 27.44, + "Tatoeba (pol-Latn_eng-Latn)": 4.09, + "Tatoeba (ita-Latn_eng-Latn)": 11.1, + "Tatoeba (cat-Latn_eng-Latn)": 9.44, + "Tatoeba (aze-Latn_eng-Latn)": 1.49, + "Tatoeba (eus-Latn_eng-Latn)": 3.94, + "Tatoeba (epo-Latn_eng-Latn)": 7.15, + "Tatoeba (lit-Latn_eng-Latn)": 1.02, + "Tatoeba (ast-Latn_eng-Latn)": 9.78, + "Tatoeba (bul-Cyrl_eng-Latn)": 0.35, + "Tatoeba (ceb-Latn_eng-Latn)": 4.41, + "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0, + "Tatoeba (tzl-Latn_eng-Latn)": 3.55, + "Tatoeba (zsm-Latn_eng-Latn)": 4.75, + "Tatoeba (mhr-Cyrl_eng-Latn)": 0.17, + "Tatoeba (pam-Latn_eng-Latn)": 4.32, + "Tatoeba (amh-Ethi_eng-Latn)": 0.0, + "Tatoeba (slv-Latn_eng-Latn)": 3.73, + "Tatoeba (lvs-Latn_eng-Latn)": 2.98, + "Tatoeba (sqi-Latn_eng-Latn)": 3.45, + "Tatoeba (orv-Cyrl_eng-Latn)": 0.0, + "Tatoeba (vie-Latn_eng-Latn)": 4.96, + "Tatoeba (pes-Arab_eng-Latn)": 0.2, + "Tatoeba (por-Latn_eng-Latn)": 10.48, + "Tatoeba (dtp-Latn_eng-Latn)": 3.54, + 
"Tatoeba (yid-Hebr_eng-Latn)": 0.08, + "Tatoeba (isl-Latn_eng-Latn)": 3.86, + "Tatoeba (cha-Latn_eng-Latn)": 12.2, + "Tatoeba (ron-Latn_eng-Latn)": 7.34, + "Tatoeba (hye-Armn_eng-Latn)": 0.14, + "Tatoeba (mar-Deva_eng-Latn)": 0.11, + "Tatoeba (hin-Deva_eng-Latn)": 0.02, + "Tatoeba (kor-Hang_eng-Latn)": 0.32, + "Tatoeba (srp-Cyrl_eng-Latn)": 1.89, + "Tatoeba (csb-Latn_eng-Latn)": 4.19, + "Tatoeba (jpn-Jpan_eng-Latn)": 1.71, + "Tatoeba (ber-Tfng_eng-Latn)": 4.56, + "Tatoeba (wuu-Hans_eng-Latn)": 0.91, + "Tatoeba (jav-Latn_eng-Latn)": 3.17, + "Tatoeba (nob-Latn_eng-Latn)": 4.37, + "Tatoeba (bre-Latn_eng-Latn)": 3.65, + "Tatoeba (kzj-Latn_eng-Latn)": 3.62, + "Tatoeba (urd-Arab_eng-Latn)": 0.0, + "Tatoeba (ces-Latn_eng-Latn)": 3.56, + "Tatoeba (cbk-Latn_eng-Latn)": 9.33, + "Tatoeba (gla-Latn_eng-Latn)": 2.04, + "Tatoeba (war-Latn_eng-Latn)": 5.14, + "Tatoeba (swh-Latn_eng-Latn)": 6.01, + "Tatoeba (swg-Latn_eng-Latn)": 7.86, + "Tatoeba (glg-Latn_eng-Latn)": 12.0, + "Tatoeba (fao-Latn_eng-Latn)": 7.08, + "Tatoeba (gsw-Latn_eng-Latn)": 10.67, + "Tatoeba (rus-Cyrl_eng-Latn)": 0.14, + "Tatoeba (kaz-Cyrl_eng-Latn)": 0.52, + "Tatoeba (gle-Latn_eng-Latn)": 2.19, + "Tatoeba (slk-Latn_eng-Latn)": 3.4, + "Tatoeba (nno-Latn_eng-Latn)": 5.75, + "Tatoeba (cor-Latn_eng-Latn)": 2.42, + "Tatoeba (nov-Latn_eng-Latn)": 16.61, + "Tatoeba (swe-Latn_eng-Latn)": 6.55, + "Tatoeba (max-Deva_eng-Latn)": 6.46, + "Tatoeba (oci-Latn_eng-Latn)": 8.57, + "Tatoeba (lfn-Latn_eng-Latn)": 6.1, + "Tatoeba (fra-Latn_eng-Latn)": 16.9, + "Tatoeba (ben-Beng_eng-Latn)": 0.0, + "Tatoeba (bel-Cyrl_eng-Latn)": 0.65, + "Tatoeba (lat-Latn_eng-Latn)": 5.78, + "Tatoeba (cmn-Hans_eng-Latn)": 2.22, + "Tatoeba (kat-Geor_eng-Latn)": 0.43, + "Tatoeba (bos-Latn_eng-Latn)": 4.6, + "Tatoeba (xho-Latn_eng-Latn)": 3.3, + "Tatoeba (tha-Thai_eng-Latn)": 0.0, + "Tatoeba (cym-Latn_eng-Latn)": 4.88, + "Tatoeba (deu-Latn_eng-Latn)": 11.46, + "Tatoeba (awa-Deva_eng-Latn)": 0.44, + "Tatoeba (ido-Latn_eng-Latn)": 9.84, + "Tatoeba (tat-Cyrl_eng-Latn)": 0.24, + "Tatoeba (kab-Latn_eng-Latn)": 1.31, + "Tatoeba (uzb-Latn_eng-Latn)": 1.98, + "Tatoeba (heb-Hebr_eng-Latn)": 0.28, + "Tatoeba (ara-Arab_eng-Latn)": 0.1, + "Tatoeba (fry-Latn_eng-Latn)": 12.43, + "Tatoeba (afr-Latn_eng-Latn)": 6.08, + "Tatoeba (kur-Latn_eng-Latn)": 3.65, + "Tatoeba (pms-Latn_eng-Latn)": 7.63, + "Tatoeba (ell-Grek_eng-Latn)": 0.0, + "Tatoeba (spa-Latn_eng-Latn)": 10.12, + "Tatoeba (dsb-Latn_eng-Latn)": 2.96, + "Tatoeba (uig-Arab_eng-Latn)": 0.33, + "Tatoeba (nld-Latn_eng-Latn)": 9.29, + "Tatoeba (tel-Telu_eng-Latn)": 0.73, + "Tatoeba (hrv-Latn_eng-Latn)": 3.77, + "Tatoeba (nds-Latn_eng-Latn)": 10.96, + "Tatoeba (hun-Latn_eng-Latn)": 3.23, + "Tatoeba (est-Latn_eng-Latn)": 2.35, + "Tatoeba (mal-Mlym_eng-Latn)": 0.15, + "Tatoeba (khm-Khmr_eng-Latn)": 0.28, + "Tatoeba (hsb-Latn_eng-Latn)": 3.12, + "Tatoeba (tgl-Latn_eng-Latn)": 4.06, + "Tatoeba (ang-Latn_eng-Latn)": 9.77, + "Tatoeba (tur-Latn_eng-Latn)": 3.16, + "Tatoeba (tuk-Latn_eng-Latn)": 2.23, + "Tatoeba (ile-Latn_eng-Latn)": 17.84, + "Tatoeba (mon-Cyrl_eng-Latn)": 0.81, + "Tatoeba (yue-Hant_eng-Latn)": 1.16, + "Tatoeba (ina-Latn_eng-Latn)": 22.55, + "Tatoeba (tam-Taml_eng-Latn)": 0.73, + "Tatoeba (ukr-Cyrl_eng-Latn)": 0.5, + "Tatoeba (dan-Latn_eng-Latn)": 10.01, + "Tatoeba (arq-Arab_eng-Latn)": 0.33, + "Tatoeba (arz-Arab_eng-Latn)": 0.0, + "Tatoeba (fin-Latn_eng-Latn)": 3.82, + "Tatoeba (ind-Latn_eng-Latn)": 4.88 } ] }, "Classification": { "accuracy": [ { - "Model": "LLM2Vec-Mistral-unsupervised", - "AmazonCounterfactualClassification 
(en)": 76.94, - "AmazonPolarityClassification": 85.29, - "AmazonReviewsClassification (en)": 47.09, - "Banking77Classification": 86.16, - "EmotionClassification": 48.88, - "ImdbClassification": 77.95, - "MTOPDomainClassification (en)": 95.48, - "MTOPIntentClassification (en)": 82.84, - "MassiveIntentClassification (en)": 76.65, - "MassiveScenarioClassification (en)": 79.99, - "ToxicConversationsClassification": 70.71, - "TweetSentimentExtractionClassification": 60.9 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "LLM2Vec-Mistral-unsupervised", - "ArxivClusteringP2P": 47.56, - "ArxivClusteringS2S": 39.92, - "BiorxivClusteringP2P": 36.14, - "BiorxivClusteringS2S": 30.26, - "MedrxivClusteringP2P": 30.11, - "MedrxivClusteringS2S": 26.93, - "RedditClustering": 41.83, - "RedditClusteringP2P": 62.08, - "StackExchangeClustering": 67.34, - "StackExchangeClusteringP2P": 34.5, - "TwentyNewsgroupsClustering": 30.26 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LLM2Vec-Mistral-unsupervised", - "SprintDuplicateQuestions": 91.3, - "TwitterSemEval2015": 68.76, - "TwitterURLCorpus": 82.76 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "LLM2Vec-Mistral-unsupervised", - "AskUbuntuDupQuestions": 58.6, - "MindSmallReranking": 29.73, - "SciDocsRR": 77.81, - "StackOverflowDupQuestions": 49.8 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LLM2Vec-Mistral-unsupervised", - "ArguAna": 51.0, - "CQADupstackRetrieval": 33.37, - "ClimateFEVER": 22.97, - "DBPedia": 25.48, - "FEVER": 45.11, - "FiQA2018": 27.24, - "HotpotQA": 54.54, - "MSMARCO": 19.13, - "NFCorpus": 27.16, - "NQ": 34.16, - "QuoraRetrieval": 84.4, - "SCIDOCS": 15.35, - "SciFact": 68.68, - "TRECCOVID": 55.67, - "Touche2020": 6.54 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LLM2Vec-Mistral-unsupervised", - "BIOSSES": 83.29, - "SICK-R": 75.55, - "STS12": 67.65, - "STS13": 83.9, - "STS14": 76.97, - "STS15": 83.8, - "STS16": 81.91, - "STS17 (en-en)": 85.58, - "STS22 (en)": 65.93, - "STSBenchmark": 80.42 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "LLM2Vec-Mistral-unsupervised", - "SummEval": 30.19 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LLM2Vec-Mistral-unsupervised" - } - ] - } - }, - "bge-small-en-v1.5-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "bge-small-en-v1.5-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-small-en-v1.5-instruct" + "Model": "all-mpnet-base-v2", + "AllegroReviews (pol-Latn)": 22.99, + "AmazonCounterfactualClassification (en-ext)": 67.5, + "AmazonCounterfactualClassification (en)": 65.03, + "AmazonCounterfactualClassification (deu-Latn)": 55.66, + "AmazonCounterfactualClassification (jpn-Jpan)": 60.69, + "AmazonPolarityClassification": 67.14, + "AmazonReviewsClassification (en)": 31.44, + "AmazonReviewsClassification (deu-Latn)": 26.05, + "AmazonReviewsClassification (spa-Latn)": 27.73, + "AmazonReviewsClassification (fra-Latn)": 28.49, + "AmazonReviewsClassification (jpn-Jpan)": 23.65, + "AmazonReviewsClassification (cmn-Hans)": 23.62, + "AngryTweetsClassification (dan-Latn)": 44.13, + "Banking77Classification": 81.7, + "CBD (pol-Latn)": 50.25, + "DanishPoliticalCommentsClassification (dan-Latn)": 28.31, + "EmotionClassification": 42.22, + "GeoreviewClassification (rus-Cyrl)": 25.93, + "HeadlineClassification (rus-Cyrl)": 28.53, + "IFlyTek (cmn-Hans)": 17.18, + "ImdbClassification": 71.17, + "InappropriatenessClassification (rus-Cyrl)": 51.82, + "JDReview (cmn-Hans)": 
60.19, + "KinopoiskClassification (rus-Cyrl)": 34.18, + "LccSentimentClassification (dan-Latn)": 39.27, + "MTOPDomainClassification (en)": 91.89, + "MTOPDomainClassification (deu-Latn)": 71.86, + "MTOPDomainClassification (spa-Latn)": 71.3, + "MTOPDomainClassification (fra-Latn)": 74.88, + "MTOPDomainClassification (hin-Deva)": 39.93, + "MTOPDomainClassification (tha-Thai)": 17.54, + "MTOPIntentClassification (en)": 68.27, + "MTOPIntentClassification (deu-Latn)": 44.36, + "MTOPIntentClassification (spa-Latn)": 39.48, + "MTOPIntentClassification (fra-Latn)": 37.57, + "MTOPIntentClassification (hin-Deva)": 18.63, + "MTOPIntentClassification (tha-Thai)": 5.42, + "MasakhaNEWSClassification (amh-Ethi)": 36.49, + "MasakhaNEWSClassification (eng)": 79.75, + "MasakhaNEWSClassification (fra-Latn)": 77.77, + "MasakhaNEWSClassification (hau-Latn)": 59.22, + "MasakhaNEWSClassification (ibo-Latn)": 61.64, + "MasakhaNEWSClassification (lin-Latn)": 74.0, + "MasakhaNEWSClassification (lug-Latn)": 58.43, + "MasakhaNEWSClassification (orm-Ethi)": 48.15, + "MasakhaNEWSClassification (pcm-Latn)": 92.2, + "MasakhaNEWSClassification (run-Latn)": 64.72, + "MasakhaNEWSClassification (sna-Latn)": 73.69, + "MasakhaNEWSClassification (som-Latn)": 49.97, + "MasakhaNEWSClassification (swa-Latn)": 55.15, + "MasakhaNEWSClassification (tir-Ethi)": 27.46, + "MasakhaNEWSClassification (xho-Latn)": 60.98, + "MasakhaNEWSClassification (yor-Latn)": 63.33, + "MassiveIntentClassification (en)": 69.76, + "MassiveIntentClassification (jav-Latn)": 31.75, + "MassiveIntentClassification (fra-Latn)": 44.27, + "MassiveIntentClassification (msa-Latn)": 30.53, + "MassiveIntentClassification (hun-Latn)": 34.38, + "MassiveIntentClassification (pol-Latn)": 34.26, + "MassiveIntentClassification (nld-Latn)": 38.49, + "MassiveIntentClassification (tha-Thai)": 8.51, + "MassiveIntentClassification (tur-Latn)": 32.02, + "MassiveIntentClassification (tam-Taml)": 9.25, + "MassiveIntentClassification (hye-Armn)": 10.11, + "MassiveIntentClassification (khm-Khmr)": 4.74, + "MassiveIntentClassification (lav-Latn)": 35.08, + "MassiveIntentClassification (deu-Latn)": 44.54, + "MassiveIntentClassification (spa-Latn)": 39.75, + "MassiveIntentClassification (ben-Beng)": 12.35, + "MassiveIntentClassification (por-Latn)": 42.83, + "MassiveIntentClassification (ara-Arab)": 20.42, + "MassiveIntentClassification (cym-Latn)": 30.82, + "MassiveIntentClassification (dan-Latn)": 42.36, + "MassiveIntentClassification (mya-Mymr)": 4.6, + "MassiveIntentClassification (heb-Hebr)": 23.6, + "MassiveIntentClassification (kan-Knda)": 3.76, + "MassiveIntentClassification (swa-Latn)": 31.82, + "MassiveIntentClassification (fas-Arab)": 22.45, + "MassiveIntentClassification (hin-Deva)": 17.68, + "MassiveIntentClassification (kat-Geor)": 7.66, + "MassiveIntentClassification (mal-Mlym)": 2.64, + "MassiveIntentClassification (fin-Latn)": 34.58, + "MassiveIntentClassification (slv-Latn)": 34.49, + "MassiveIntentClassification (afr-Latn)": 36.49, + "MassiveIntentClassification (urd-Arab)": 12.86, + "MassiveIntentClassification (ron-Latn)": 38.07, + "MassiveIntentClassification (sqi-Latn)": 37.26, + "MassiveIntentClassification (cmo-Hant)": 22.43, + "MassiveIntentClassification (ita-Latn)": 40.29, + "MassiveIntentClassification (ind-Latn)": 36.31, + "MassiveIntentClassification (nob-Latn)": 39.3, + "MassiveIntentClassification (jpn-Jpan)": 33.13, + "MassiveIntentClassification (aze-Latn)": 28.92, + "MassiveIntentClassification (mon-Cyrl)": 19.65, + "MassiveIntentClassification 
(ell-Grek)": 24.52, + "MassiveIntentClassification (rus-Cyrl)": 23.98, + "MassiveIntentClassification (kor-Kore)": 13.35, + "MassiveIntentClassification (cmo-Hans)": 24.36, + "MassiveIntentClassification (isl-Latn)": 31.46, + "MassiveIntentClassification (swe-Latn)": 39.02, + "MassiveIntentClassification (tel-Telu)": 2.26, + "MassiveIntentClassification (vie-Latn)": 31.47, + "MassiveIntentClassification (tgl-Latn)": 36.33, + "MassiveIntentClassification (amh-Ethi)": 2.39, + "MassiveScenarioClassification (en)": 75.67, + "MassiveScenarioClassification (tur-Latn)": 39.11, + "MassiveScenarioClassification (kat-Geor)": 13.45, + "MassiveScenarioClassification (jpn-Jpan)": 40.57, + "MassiveScenarioClassification (spa-Latn)": 50.92, + "MassiveScenarioClassification (fas-Arab)": 27.8, + "MassiveScenarioClassification (hun-Latn)": 41.01, + "MassiveScenarioClassification (jav-Latn)": 40.0, + "MassiveScenarioClassification (por-Latn)": 52.06, + "MassiveScenarioClassification (sqi-Latn)": 44.67, + "MassiveScenarioClassification (lav-Latn)": 39.28, + "MassiveScenarioClassification (deu-Latn)": 54.09, + "MassiveScenarioClassification (nld-Latn)": 47.79, + "MassiveScenarioClassification (mon-Cyrl)": 25.58, + "MassiveScenarioClassification (swa-Latn)": 40.34, + "MassiveScenarioClassification (ben-Beng)": 17.49, + "MassiveScenarioClassification (cym-Latn)": 34.82, + "MassiveScenarioClassification (swe-Latn)": 44.53, + "MassiveScenarioClassification (rus-Cyrl)": 28.71, + "MassiveScenarioClassification (fra-Latn)": 54.26, + "MassiveScenarioClassification (dan-Latn)": 49.45, + "MassiveScenarioClassification (mya-Mymr)": 10.8, + "MassiveScenarioClassification (ron-Latn)": 47.86, + "MassiveScenarioClassification (cmo-Hans)": 35.33, + "MassiveScenarioClassification (hin-Deva)": 23.13, + "MassiveScenarioClassification (cmo-Hant)": 31.7, + "MassiveScenarioClassification (afr-Latn)": 43.63, + "MassiveScenarioClassification (aze-Latn)": 36.42, + "MassiveScenarioClassification (msa-Latn)": 37.28, + "MassiveScenarioClassification (ell-Grek)": 33.85, + "MassiveScenarioClassification (isl-Latn)": 39.36, + "MassiveScenarioClassification (fin-Latn)": 38.41, + "MassiveScenarioClassification (ind-Latn)": 43.05, + "MassiveScenarioClassification (pol-Latn)": 42.66, + "MassiveScenarioClassification (tam-Taml)": 14.55, + "MassiveScenarioClassification (ita-Latn)": 51.37, + "MassiveScenarioClassification (urd-Arab)": 20.0, + "MassiveScenarioClassification (kan-Knda)": 8.34, + "MassiveScenarioClassification (tel-Telu)": 7.81, + "MassiveScenarioClassification (mal-Mlym)": 7.69, + "MassiveScenarioClassification (ara-Arab)": 27.8, + "MassiveScenarioClassification (kor-Kore)": 17.28, + "MassiveScenarioClassification (vie-Latn)": 35.9, + "MassiveScenarioClassification (amh-Ethi)": 7.43, + "MassiveScenarioClassification (heb-Hebr)": 25.49, + "MassiveScenarioClassification (hye-Armn)": 16.86, + "MassiveScenarioClassification (khm-Khmr)": 9.63, + "MassiveScenarioClassification (slv-Latn)": 39.88, + "MassiveScenarioClassification (tgl-Latn)": 47.04, + "MassiveScenarioClassification (nob-Latn)": 45.75, + "MassiveScenarioClassification (tha-Thai)": 17.01, + "MultilingualSentiment (cmn-Hans)": 41.2, + "NoRecClassification (nob-Latn)": 38.34, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 50.15, + "OnlineShopping (cmn-Hans)": 56.94, + "PAC (pol-Latn)": 62.1, + "PolEmo2.0-IN (pol-Latn)": 41.63, + "PolEmo2.0-OUT (pol-Latn)": 25.0, + "RuReviewsClassification (rus-Cyrl)": 42.33, + 
"RuSciBenchGRNTIClassification (rus-Cyrl)": 13.29, + "RuSciBenchOECDClassification (rus-Cyrl)": 10.62, + "TNews (cmn-Hans)": 21.05, + "ToxicConversationsClassification": 61.05, + "TweetSentimentExtractionClassification": 55.05, + "Waimai (cmn-Hans)": 63.31 } ] }, "Clustering": { "v_measure": [ { - "Model": "bge-small-en-v1.5-instruct" + "Model": "all-mpnet-base-v2", + "ArxivClusteringP2P": 48.38, + "ArxivClusteringS2S": 39.72, + "BiorxivClusteringP2P": 39.62, + "BiorxivClusteringS2S": 35.02, + "GeoreviewClusteringP2P (rus-Cyrl)": 20.33, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 42.49, + "MasakhaNEWSClusteringP2P (eng)": 67.24, + "MasakhaNEWSClusteringP2P (fra-Latn)": 61.99, + "MasakhaNEWSClusteringP2P (hau-Latn)": 37.17, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 52.0, + "MasakhaNEWSClusteringP2P (lin-Latn)": 69.68, + "MasakhaNEWSClusteringP2P (lug-Latn)": 50.96, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.42, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.01, + "MasakhaNEWSClusteringP2P (run-Latn)": 57.6, + "MasakhaNEWSClusteringP2P (sna-Latn)": 54.99, + "MasakhaNEWSClusteringP2P (som-Latn)": 31.16, + "MasakhaNEWSClusteringP2P (swa-Latn)": 28.29, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 41.85, + "MasakhaNEWSClusteringP2P (xho-Latn)": 35.24, + "MasakhaNEWSClusteringP2P (yor-Latn)": 42.15, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.48, + "MasakhaNEWSClusteringS2S (eng)": 35.69, + "MasakhaNEWSClusteringS2S (fra-Latn)": 41.05, + "MasakhaNEWSClusteringS2S (hau-Latn)": 16.64, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 38.63, + "MasakhaNEWSClusteringS2S (lin-Latn)": 70.72, + "MasakhaNEWSClusteringS2S (lug-Latn)": 46.97, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 23.85, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.7, + "MasakhaNEWSClusteringS2S (run-Latn)": 52.27, + "MasakhaNEWSClusteringS2S (sna-Latn)": 47.64, + "MasakhaNEWSClusteringS2S (som-Latn)": 30.94, + "MasakhaNEWSClusteringS2S (swa-Latn)": 17.12, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.01, + "MasakhaNEWSClusteringS2S (xho-Latn)": 24.16, + "MasakhaNEWSClusteringS2S (yor-Latn)": 35.04, + "MedrxivClusteringP2P": 35.58, + "MedrxivClusteringS2S": 32.87, + "RedditClustering": 54.82, + "RedditClusteringP2P": 56.77, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 14.66, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 12.49, + "StackExchangeClustering": 53.8, + "StackExchangeClusteringP2P": 34.28, + "TwentyNewsgroupsClustering": 49.74 } ] }, "PairClassification": { "ap": [ { - "Model": "bge-small-en-v1.5-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-small-en-v1.5-instruct" + "Model": "all-mpnet-base-v2", + "CDSC-E (pol-Latn)": 45.37, + "OpusparcusPC (deu-Latn)": 89.78, + "OpusparcusPC (en)": 97.75, + "OpusparcusPC (fin-Latn)": 85.82, + "OpusparcusPC (fra-Latn)": 86.61, + "OpusparcusPC (rus-Cyrl)": 79.85, + "OpusparcusPC (swe-Latn)": 81.81, + "PSC (pol-Latn)": 83.28, + "PawsXPairClassification (deu-Latn)": 52.17, + "PawsXPairClassification (en)": 61.99, + "PawsXPairClassification (spa-Latn)": 55.06, + "PawsXPairClassification (fra-Latn)": 56.42, + "PawsXPairClassification (jpn-Hira)": 47.43, + "PawsXPairClassification (kor-Hang)": 49.75, + "PawsXPairClassification (cmn-Hans)": 52.47, + "SICK-E-PL (pol-Latn)": 46.51, + "SprintDuplicateQuestions": 90.15, + "TERRa (rus-Cyrl)": 44.52, + "TwitterSemEval2015": 73.85, + "TwitterURLCorpus": 85.11 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "all-mpnet-base-v2", + "AlloprofReranking (fra-Latn)": 69.63, + "AskUbuntuDupQuestions": 65.85, + "MMarcoReranking (cmn-Hans)": 4.65, + 
"MindSmallReranking": 30.97, + "RuBQReranking (rus-Cyrl)": 30.96, + "SciDocsRR": 88.65, + "StackOverflowDupQuestions": 51.98, + "SyntecReranking (fra-Latn)": 66.12, + "T2Reranking (cmn-Hans)": 58.3 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bge-small-en-v1.5-instruct", - "ARCChallenge": 7.72, - "AlphaNLI": 1.26, - "HellaSwag": 23.41, - "PIQA": 20.79, - "Quail": 2.01, - "RARbCode": 41.52, - "RARbMath": 46.5, - "SIQA": 0.98, - "SpartQA": 2.86, - "TempReasonL1": 1.27, - "TempReasonL2Fact": 16.72, - "TempReasonL2Pure": 1.1, - "TempReasonL3Fact": 12.81, - "TempReasonL3Pure": 4.63, - "WinoGrande": 5.35 + "Model": "all-mpnet-base-v2", + "AILACasedocs": 22.51, + "AILAStatutes": 21.27, + "ARCChallenge": 11.8, + "AlloprofRetrieval (fra-Latn)": 34.27, + "AlphaNLI": 22.41, + "ArguAna": 46.52, + "ArguAna-PL (pol-Latn)": 14.72, + "BSARDRetrieval (fra-Latn)": 6.98, + "BrightRetrieval (robotics)": 8.36, + "BrightRetrieval (psychology)": 22.63, + "BrightRetrieval (leetcode)": 26.4, + "BrightRetrieval (biology)": 15.52, + "BrightRetrieval (theoremqa_questions)": 18.49, + "BrightRetrieval (economics)": 16.64, + "BrightRetrieval (stackoverflow)": 9.48, + "BrightRetrieval (pony)": 6.95, + "BrightRetrieval (earth_science)": 20.11, + "BrightRetrieval (theoremqa_theorems)": 12.38, + "BrightRetrieval (sustainable_living)": 15.34, + "BrightRetrieval (aops)": 5.32, + "CQADupstackRetrieval": 44.96, + "ClimateFEVER": 21.97, + "CmedqaRetrieval (cmn-Hans)": 2.0, + "CovidRetrieval (cmn-Hans)": 3.7, + "DBPedia": 32.09, + "DuRetrieval (cmn-Hans)": 4.92, + "EcomRetrieval (cmn-Hans)": 3.94, + "FEVER": 50.86, + "FiQA-PL (pol-Latn)": 3.6, + "FiQA2018": 49.96, + "GerDaLIRSmall (deu-Latn)": 3.78, + "HellaSwag": 26.27, + "HotpotQA": 39.29, + "LEMBNarrativeQARetrieval": 19.34, + "LEMBNeedleRetrieval": 16.0, + "LEMBPasskeyRetrieval": 24.5, + "LEMBQMSumRetrieval": 21.54, + "LEMBSummScreenFDRetrieval": 60.43, + "LEMBWikimQARetrieval": 44.92, + "LeCaRDv2 (zho-Hans)": 18.09, + "LegalBenchConsumerContractsQA": 75.25, + "LegalBenchCorporateLobbying": 89.04, + "LegalQuAD (deu-Latn)": 10.67, + "LegalSummarization": 58.55, + "MMarcoRetrieval (cmn-Hans)": 7.13, + "MSMARCO": 39.75, + "MedicalRetrieval (cmn-Hans)": 1.71, + "MintakaRetrieval (ara-Arab)": 1.97, + "MintakaRetrieval (deu-Latn)": 17.21, + "MintakaRetrieval (spa-Latn)": 10.11, + "MintakaRetrieval (fra-Latn)": 12.93, + "MintakaRetrieval (hin-Deva)": 2.05, + "MintakaRetrieval (ita-Latn)": 5.63, + "MintakaRetrieval (jpn-Hira)": 6.72, + "MintakaRetrieval (por-Latn)": 8.05, + "NFCorpus": 33.29, + "NFCorpus-PL (pol-Latn)": 8.77, + "NQ": 50.45, + "PIQA": 29.03, + "Quail": 3.41, + "QuoraRetrieval": 87.46, + "RARbCode": 53.21, + "RARbMath": 71.85, + "RuBQRetrieval (rus-Cyrl)": 4.75, + "SCIDOCS": 23.76, + "SCIDOCS-PL (pol-Latn)": 4.02, + "SIQA": 2.38, + "SciFact": 65.57, + "SciFact-PL (pol-Latn)": 13.31, + "SpartQA": 0.22, + "SyntecRetrieval (fra-Latn)": 57.39, + "T2Retrieval (cmn-Hans)": 2.98, + "TRECCOVID": 51.33, + "TRECCOVID-PL (pol-Latn)": 12.12, + "TempReasonL1": 1.77, + "TempReasonL2Fact": 11.2, + "TempReasonL2Pure": 1.15, + "TempReasonL3Fact": 9.42, + "TempReasonL3Pure": 5.59, + "Touche2020": 19.93, + "VideoRetrieval (cmn-Hans)": 8.48, + "WinoGrande": 20.8, + "XPQARetrieval (ara-Arab_ara-Arab)": 9.42, + "XPQARetrieval (eng-Latn_ara-Arab)": 2.39, + "XPQARetrieval (ara-Arab_eng-Latn)": 8.98, + "XPQARetrieval (deu-Latn_deu-Latn)": 55.82, + "XPQARetrieval (eng-Latn_deu-Latn)": 11.74, + "XPQARetrieval (deu-Latn_eng-Latn)": 30.44, + "XPQARetrieval (spa-Latn_spa-Latn)": 40.01, + 
"XPQARetrieval (eng-Latn_spa-Latn)": 6.12, + "XPQARetrieval (spa-Latn_eng-Latn)": 29.44, + "XPQARetrieval (fra-Latn_fra-Latn)": 51.94, + "XPQARetrieval (eng-Latn_fra-Latn)": 11.48, + "XPQARetrieval (fra-Latn_eng-Latn)": 32.52, + "XPQARetrieval (hin-Deva_hin-Deva)": 37.48, + "XPQARetrieval (eng-Latn_hin-Deva)": 5.11, + "XPQARetrieval (hin-Deva_eng-Latn)": 7.37, + "XPQARetrieval (ita-Latn_ita-Latn)": 54.2, + "XPQARetrieval (eng-Latn_ita-Latn)": 6.08, + "XPQARetrieval (ita-Latn_eng-Latn)": 30.32, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 37.45, + "XPQARetrieval (eng-Latn_jpn-Hira)": 5.79, + "XPQARetrieval (jpn-Hira_eng-Latn)": 14.77, + "XPQARetrieval (kor-Hang_kor-Hang)": 10.4, + "XPQARetrieval (eng-Latn_kor-Hang)": 7.09, + "XPQARetrieval (kor-Hang_eng-Latn)": 6.95, + "XPQARetrieval (pol-Latn_pol-Latn)": 23.67, + "XPQARetrieval (eng-Latn_pol-Latn)": 8.83, + "XPQARetrieval (pol-Latn_eng-Latn)": 15.94, + "XPQARetrieval (por-Latn_por-Latn)": 33.56, + "XPQARetrieval (eng-Latn_por-Latn)": 3.76, + "XPQARetrieval (por-Latn_eng-Latn)": 23.45, + "XPQARetrieval (tam-Taml_tam-Taml)": 5.53, + "XPQARetrieval (eng-Latn_tam-Taml)": 3.3, + "XPQARetrieval (tam-Taml_eng-Latn)": 4.0, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 23.84, + "XPQARetrieval (eng-Latn_cmn-Hans)": 7.2, + "XPQARetrieval (cmn-Hans_eng-Latn)": 12.84 } ] }, "STS": { "spearman": [ { - "Model": "bge-small-en-v1.5-instruct" + "Model": "all-mpnet-base-v2", + "AFQMC (cmn-Hans)": 8.01, + "ATEC (cmn-Hans)": 14.03, + "BIOSSES": 80.43, + "BQ (cmn-Hans)": 21.39, + "CDSC-R (pol-Latn)": 77.04, + "LCQMC (cmn-Hans)": 22.84, + "PAWSX (cmn-Hans)": 6.44, + "RUParaPhraserSTS (rus-Cyrl)": 42.15, + "RuSTSBenchmarkSTS (rus-Cyrl)": 55.68, + "SICK-R": 80.59, + "SICK-R-PL (pol-Latn)": 50.2, + "SICKFr (fra-Latn)": 67.05, + "STS12": 72.63, + "STS13": 83.48, + "STS14": 78.0, + "STS15": 85.66, + "STS16": 80.03, + "STS17 (en-en)": 90.6, + "STS17 (eng-Latn_ara-Arab)": 6.76, + "STS17 (fra-Latn_eng-Latn)": 41.64, + "STS17 (eng-Latn_tur-Latn)": -4.58, + "STS17 (eng-Latn_deu-Latn)": 35.5, + "STS17 (spa-Latn_eng-Latn)": 25.28, + "STS17 (ita-Latn_eng-Latn)": 31.8, + "STS17 (spa-Latn)": 78.4, + "STS17 (kor-Hang)": 39.11, + "STS17 (ara-Arab)": 55.42, + "STS17 (nld-Latn_eng-Latn)": 32.89, + "STS22 (en)": 68.39, + "STS22 (spa-Latn_eng-Latn)": 55.09, + "STS22 (deu-Latn_pol-Latn)": 23.53, + "STS22 (cmn-Hans_eng-Latn)": 40.47, + "STS22 (pol-Latn)": 24.21, + "STS22 (tur-Latn)": 29.35, + "STS22 (spa-Latn_ita-Latn)": 41.61, + "STS22 (fra-Latn_pol-Latn)": 73.25, + "STS22 (rus-Cyrl)": 15.83, + "STS22 (deu-Latn)": 27.0, + "STS22 (spa-Latn)": 55.98, + "STS22 (pol-Latn_eng-Latn)": 51.07, + "STS22 (fra-Latn)": 77.1, + "STS22 (deu-Latn_eng-Latn)": 49.73, + "STS22 (ara-Arab)": 38.96, + "STS22 (deu-Latn_fra-Latn)": 31.39, + "STS22 (ita-Latn)": 58.02, + "STS22 (cmn-Hans)": 42.24, + "STSB (cmn-Hans)": 37.7, + "STSBenchmark": 83.42, + "STSBenchmarkMultilingualSTS (nld-Latn)": 57.01, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.54, + "STSBenchmarkMultilingualSTS (fra-Latn)": 65.15, + "STSBenchmarkMultilingualSTS (ita-Latn)": 62.72, + "STSBenchmarkMultilingualSTS (spa-Latn)": 65.78, + "STSBenchmarkMultilingualSTS (en)": 83.42, + "STSBenchmarkMultilingualSTS (deu-Latn)": 61.43, + "STSBenchmarkMultilingualSTS (por-Latn)": 62.12, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.43, + "STSBenchmarkMultilingualSTS (pol-Latn)": 52.36 } ] }, "Summarization": { "spearman": [ { - "Model": "bge-small-en-v1.5-instruct" + "Model": "all-mpnet-base-v2", + "SummEval": 27.49, + "SummEvalFr (fra-Latn)": 28.11 } ] }, 
"InstructionRetrieval": { "p-MRR": [ { - "Model": "bge-small-en-v1.5-instruct" + "Model": "all-mpnet-base-v2" } ] } }, - "m3e-large": { + "flaubert_base_uncased": { "BitextMining": { "f1": [ { - "Model": "m3e-large" + "Model": "flaubert_base_uncased" } ] }, "Classification": { "accuracy": [ { - "Model": "m3e-large", - "AmazonReviewsClassification (zh)": 44.44, - "IFlyTek": 43.96, - "JDReview": 86.92, - "MassiveIntentClassification (zh-CN)": 67.23, - "MassiveScenarioClassification (zh-CN)": 74.88, - "MultilingualSentiment": 72.47, - "OnlineShopping": 89.59, - "TNews": 48.26, - "Waimai": 86.08 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "m3e-large", - "CLSClusteringP2P": 38.6, - "CLSClusteringS2S": 38.02, - "ThuNewsClusteringP2P": 60.39, - "ThuNewsClusteringS2S": 58.51 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "m3e-large", - "Cmnli": 69.27, - "Ocnli": 59.33 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "m3e-large", - "CMedQAv1": 77.76, - "CMedQAv2": 78.27, - "MMarcoReranking": 16.46, - "T2Reranking": 66.13 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "m3e-large", - "CmedqaRetrieval": 30.73, - "CovidRetrieval": 61.33, - "DuRetrieval": 74.69, - "EcomRetrieval": 45.18, - "MMarcoRetrieval": 61.06, - "MedicalRetrieval": 48.66, - "T2Retrieval": 72.36, - "VideoRetrieval": 44.02 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "m3e-large", - "AFQMC": 36.53, - "ATEC": 41.8, - "BQ": 65.2, - "LCQMC": 74.2, - "PAWSX": 15.95, - "QBQTC": 32.65, - "STS22 (zh)": 62.91, - "STSB": 74.16 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "m3e-large" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "m3e-large" - } - ] - } - }, - "nomic-embed-text-v1.5-128": { - "BitextMining": { - "f1": [ - { - "Model": "nomic-embed-text-v1.5-128" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "nomic-embed-text-v1.5-128", - "AmazonCounterfactualClassification (en)": 69.78, - "AmazonPolarityClassification": 88.74, - "AmazonReviewsClassification (en)": 43.11, - "Banking77Classification": 82.78, - "EmotionClassification": 42.92, - "ImdbClassification": 80.87, - "MTOPDomainClassification (en)": 89.61, - "MTOPIntentClassification (en)": 68.9, - "MassiveIntentClassification (en)": 69.34, - "MassiveScenarioClassification (en)": 74.21, - "ToxicConversationsClassification": 68.16, - "TweetSentimentExtractionClassification": 57.99 + "Model": "flaubert_base_uncased", + "AmazonReviewsClassification (fr)": 23.52, + "MTOPDomainClassification (fr)": 27.74, + "MTOPIntentClassification (fr)": 8.61, + "MasakhaNEWSClassification (fra)": 62.61, + "MassiveIntentClassification (fr)": 6.24, + "MassiveScenarioClassification (fr)": 10.98 } ] }, "Clustering": { "v_measure": [ { - "Model": "nomic-embed-text-v1.5-128", - "ArxivClusteringP2P": 43.87, - "ArxivClusteringS2S": 34.57, - "BiorxivClusteringP2P": 36.79, - "BiorxivClusteringS2S": 30.68, - "MedrxivClusteringP2P": 34.09, - "MedrxivClusteringS2S": 31.3, - "RedditClustering": 53.31, - "RedditClusteringP2P": 58.96, - "StackExchangeClustering": 59.92, - "StackExchangeClusteringP2P": 33.88, - "TwentyNewsgroupsClustering": 47.29 + "Model": "flaubert_base_uncased", + "AlloProfClusteringP2P": 43.2, + "AlloProfClusteringS2S": 12.94, + "HALClusteringS2S": 1.8, + "MLSUMClusteringP2P": 33.22, + "MLSUMClusteringS2S": 14.9, + "MasakhaNEWSClusteringP2P (fra)": 28.49, + "MasakhaNEWSClusteringS2S (fra)": 22.58 } ] }, "PairClassification": { "ap": [ { - "Model": "nomic-embed-text-v1.5-128", 
- "SprintDuplicateQuestions": 91.45, - "TwitterSemEval2015": 73.23, - "TwitterURLCorpus": 85.93 + "Model": "flaubert_base_uncased", + "OpusparcusPC (fr)": 82.0, + "PawsXPairClassification (fr)": 52.78 } ] }, "Reranking": { "map": [ { - "Model": "nomic-embed-text-v1.5-128", - "AskUbuntuDupQuestions": 61.16, - "MindSmallReranking": 30.02, - "SciDocsRR": 78.05, - "StackOverflowDupQuestions": 49.0 + "Model": "flaubert_base_uncased", + "AlloprofReranking": 34.55, + "SyntecReranking": 57.18 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "nomic-embed-text-v1.5-128", - "ArguAna": 43.4, - "CQADupstackRetrieval": 34.67, - "ClimateFEVER": 36.52, - "DBPedia": 36.22, - "FEVER": 80.48, - "FiQA2018": 32.08, - "HotpotQA": 60.09, - "MSMARCO": 39.99, - "NFCorpus": 30.72, - "NQ": 53.62, - "QuoraRetrieval": 87.07, - "SCIDOCS": 15.56, - "SciFact": 64.28, - "TRECCOVID": 74.58, - "Touche2020": 26.99 + "Model": "flaubert_base_uncased", + "AlloprofRetrieval": 1.72, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 0.51, + "SyntecRetrieval": 22.33, + "XPQARetrieval (fr)": 9.09 } ] }, "STS": { "spearman": [ { - "Model": "nomic-embed-text-v1.5-128", - "BIOSSES": 80.19, - "SICK-R": 79.09, - "STS12": 77.49, - "STS13": 85.62, - "STS14": 80.5, - "STS15": 85.84, - "STS16": 83.9, - "STS17 (en-en)": 86.27, - "STS22 (en)": 64.24, - "STSBenchmark": 84.28 + "Model": "flaubert_base_uncased", + "SICKFr": 41.9, + "STS22 (fr)": 55.15, + "STSBenchmarkMultilingualSTS (fr)": 33.41 } ] }, "Summarization": { "spearman": [ { - "Model": "nomic-embed-text-v1.5-128", - "SummEval": 29.59 + "Model": "flaubert_base_uncased", + "SummEvalFr": 29.43 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "nomic-embed-text-v1.5-128" + "Model": "flaubert_base_uncased" } ] } }, - "dragon-plus-instruct": { + "GritLM-7B-noinstruct": { "BitextMining": { "f1": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "Classification": { "accuracy": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "Clustering": { "v_measure": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "PairClassification": { "ap": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "Reranking": { "map": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "dragon-plus-instruct", - "ARCChallenge": 8.24, - "AlphaNLI": 25.18, - "HellaSwag": 24.06, - "PIQA": 26.35, - "Quail": 4.2, - "RARbCode": 12.84, - "RARbMath": 36.15, - "SIQA": 1.75, - "SpartQA": 10.82, - "TempReasonL1": 1.54, - "TempReasonL2Fact": 16.11, - "TempReasonL2Pure": 0.57, - "TempReasonL3Fact": 14.81, - "TempReasonL3Pure": 7.46, - "WinoGrande": 60.84 + "Model": "GritLM-7B-noinstruct", + "ARCChallenge": 16.57, + "AlphaNLI": 29.56, + "HellaSwag": 36.03, + "PIQA": 35.8, + "Quail": 8.68, + "RARbCode": 83.14, + "RARbMath": 83.01, + "SIQA": 5.73, + "SpartQA": 1.56, + "TempReasonL1": 2.57, + "TempReasonL2Fact": 48.25, + "TempReasonL2Pure": 8.98, + "TempReasonL3Fact": 34.11, + "TempReasonL3Pure": 12.44, + "WinoGrande": 52.12 } ] }, "STS": { "spearman": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "Summarization": { "spearman": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "dragon-plus-instruct" + "Model": "GritLM-7B-noinstruct" } ] } }, - "st-polish-paraphrase-from-mpnet": { + "sentence-t5-base": { "BitextMining": { "f1": [ { - 
"Model": "st-polish-paraphrase-from-mpnet" + "Model": "sentence-t5-base" } ] }, "Classification": { "accuracy": [ { - "Model": "st-polish-paraphrase-from-mpnet", - "AllegroReviews": 34.55, - "CBD": 67.48, - "MassiveIntentClassification (pl)": 65.93, - "MassiveScenarioClassification (pl)": 71.85, - "PAC": 63.25, - "PolEmo2.0-IN": 68.37, - "PolEmo2.0-OUT": 30.99 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "st-polish-paraphrase-from-mpnet", - "8TagsClustering": 33.15 - } - ] + "Model": "sentence-t5-base", + "AmazonCounterfactualClassification (de)": 69.98, + "AmazonCounterfactualClassification (en)": 75.82, + "AmazonCounterfactualClassification (en-ext)": 76.81, + "AmazonCounterfactualClassification (ja)": 46.05, + "AmazonPolarityClassification": 85.12, + "AmazonReviewsClassification (de)": 37.9, + "AmazonReviewsClassification (en)": 44.94, + "AmazonReviewsClassification (es)": 37.33, + "AmazonReviewsClassification (fr)": 37.35, + "AmazonReviewsClassification (ja)": 22.29, + "AmazonReviewsClassification (zh)": 21.53, + "Banking77Classification": 76.48, + "EmotionClassification": 51.35, + "ImdbClassification": 77.34, + "MTOPDomainClassification (de)": 76.98, + "MTOPDomainClassification (en)": 90.34, + "MTOPDomainClassification (es)": 73.61, + "MTOPDomainClassification (fr)": 75.03, + "MTOPDomainClassification (hi)": 21.4, + "MTOPDomainClassification (th)": 16.21, + "MTOPIntentClassification (de)": 44.43, + "MTOPIntentClassification (en)": 63.32, + "MTOPIntentClassification (es)": 42.03, + "MTOPIntentClassification (fr)": 43.85, + "MTOPIntentClassification (hi)": 3.8, + "MTOPIntentClassification (th)": 5.21, + "MasakhaNEWSClassification (fra)": 81.21, + "MassiveIntentClassification (af)": 34.32, + "MassiveIntentClassification (am)": 2.38, + "MassiveIntentClassification (ar)": 4.53, + "MassiveIntentClassification (az)": 31.76, + "MassiveIntentClassification (bn)": 2.58, + "MassiveIntentClassification (cy)": 28.94, + "MassiveIntentClassification (da)": 38.82, + "MassiveIntentClassification (de)": 45.23, + "MassiveIntentClassification (el)": 10.05, + "MassiveIntentClassification (en)": 69.74, + "MassiveIntentClassification (es)": 45.32, + "MassiveIntentClassification (fa)": 3.58, + "MassiveIntentClassification (fi)": 33.52, + "MassiveIntentClassification (fr)": 51.13, + "MassiveIntentClassification (he)": 2.63, + "MassiveIntentClassification (hi)": 2.68, + "MassiveIntentClassification (hu)": 32.31, + "MassiveIntentClassification (hy)": 3.33, + "MassiveIntentClassification (id)": 35.5, + "MassiveIntentClassification (is)": 29.82, + "MassiveIntentClassification (it)": 45.59, + "MassiveIntentClassification (ja)": 3.67, + "MassiveIntentClassification (jv)": 31.15, + "MassiveIntentClassification (ka)": 2.77, + "MassiveIntentClassification (km)": 5.66, + "MassiveIntentClassification (kn)": 2.59, + "MassiveIntentClassification (ko)": 2.34, + "MassiveIntentClassification (lv)": 33.97, + "MassiveIntentClassification (ml)": 2.55, + "MassiveIntentClassification (mn)": 14.7, + "MassiveIntentClassification (ms)": 33.12, + "MassiveIntentClassification (my)": 4.42, + "MassiveIntentClassification (nb)": 38.53, + "MassiveIntentClassification (nl)": 37.96, + "MassiveIntentClassification (pl)": 34.41, + "MassiveIntentClassification (pt)": 43.35, + "MassiveIntentClassification (ro)": 42.69, + "MassiveIntentClassification (ru)": 14.82, + "MassiveIntentClassification (sl)": 34.54, + "MassiveIntentClassification (sq)": 38.54, + "MassiveIntentClassification (sv)": 35.98, + "MassiveIntentClassification 
(sw)": 32.14, + "MassiveIntentClassification (ta)": 1.41, + "MassiveIntentClassification (te)": 2.5, + "MassiveIntentClassification (th)": 3.71, + "MassiveIntentClassification (tl)": 36.04, + "MassiveIntentClassification (tr)": 33.77, + "MassiveIntentClassification (ur)": 2.99, + "MassiveIntentClassification (vi)": 22.62, + "MassiveIntentClassification (zh-CN)": 1.12, + "MassiveIntentClassification (zh-TW)": 4.63, + "MassiveScenarioClassification (af)": 44.45, + "MassiveScenarioClassification (am)": 7.51, + "MassiveScenarioClassification (ar)": 12.32, + "MassiveScenarioClassification (az)": 38.41, + "MassiveScenarioClassification (bn)": 8.45, + "MassiveScenarioClassification (cy)": 35.04, + "MassiveScenarioClassification (da)": 48.36, + "MassiveScenarioClassification (de)": 59.12, + "MassiveScenarioClassification (el)": 17.68, + "MassiveScenarioClassification (en)": 72.32, + "MassiveScenarioClassification (es)": 55.61, + "MassiveScenarioClassification (fa)": 6.86, + "MassiveScenarioClassification (fi)": 41.34, + "MassiveScenarioClassification (fr)": 59.92, + "MassiveScenarioClassification (he)": 7.86, + "MassiveScenarioClassification (hi)": 7.63, + "MassiveScenarioClassification (hu)": 41.31, + "MassiveScenarioClassification (hy)": 9.23, + "MassiveScenarioClassification (id)": 44.64, + "MassiveScenarioClassification (is)": 39.63, + "MassiveScenarioClassification (it)": 54.58, + "MassiveScenarioClassification (ja)": 4.96, + "MassiveScenarioClassification (jv)": 40.73, + "MassiveScenarioClassification (ka)": 7.51, + "MassiveScenarioClassification (km)": 8.73, + "MassiveScenarioClassification (kn)": 7.99, + "MassiveScenarioClassification (ko)": 6.03, + "MassiveScenarioClassification (lv)": 36.42, + "MassiveScenarioClassification (ml)": 6.96, + "MassiveScenarioClassification (mn)": 19.85, + "MassiveScenarioClassification (ms)": 43.18, + "MassiveScenarioClassification (my)": 9.46, + "MassiveScenarioClassification (nb)": 46.6, + "MassiveScenarioClassification (nl)": 50.0, + "MassiveScenarioClassification (pl)": 42.3, + "MassiveScenarioClassification (pt)": 52.24, + "MassiveScenarioClassification (ro)": 53.7, + "MassiveScenarioClassification (ru)": 20.69, + "MassiveScenarioClassification (sl)": 39.79, + "MassiveScenarioClassification (sq)": 50.16, + "MassiveScenarioClassification (sv)": 46.69, + "MassiveScenarioClassification (sw)": 40.48, + "MassiveScenarioClassification (ta)": 7.47, + "MassiveScenarioClassification (te)": 6.87, + "MassiveScenarioClassification (th)": 8.26, + "MassiveScenarioClassification (tl)": 48.94, + "MassiveScenarioClassification (tr)": 41.83, + "MassiveScenarioClassification (ur)": 9.77, + "MassiveScenarioClassification (vi)": 30.01, + "MassiveScenarioClassification (zh-CN)": 4.17, + "MassiveScenarioClassification (zh-TW)": 7.91, + "ToxicConversationsClassification": 68.2, + "TweetSentimentExtractionClassification": 62.71 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "sentence-t5-base", + "AlloProfClusteringP2P": 58.44, + "AlloProfClusteringS2S": 35.93, + "ArxivClusteringP2P": 39.28, + "ArxivClusteringS2S": 27.26, + "BiorxivClusteringP2P": 33.99, + "BiorxivClusteringS2S": 22.92, + "BlurbsClusteringP2P": 30.59, + "BlurbsClusteringS2S": 11.57, + "HALClusteringS2S": 17.72, + "MLSUMClusteringP2P": 40.77, + "MLSUMClusteringS2S": 30.06, + "MasakhaNEWSClusteringP2P (fra)": 61.9, + "MasakhaNEWSClusteringS2S (fra)": 35.64, + "MedrxivClusteringP2P": 33.2, + "MedrxivClusteringS2S": 26.13, + "RedditClustering": 52.93, + "RedditClusteringP2P": 59.67, + 
"StackExchangeClustering": 63.13, + "StackExchangeClusteringP2P": 35.68, + "TenKGnadClusteringP2P": 44.88, + "TenKGnadClusteringS2S": 18.11, + "TwentyNewsgroupsClustering": 48.1 + } + ] }, "PairClassification": { "ap": [ { - "Model": "st-polish-paraphrase-from-mpnet", - "CDSC-E": 75.06, - "PPC": 93.49, - "PSC": 99.05, - "SICK-E-PL": 80.56 + "Model": "sentence-t5-base", + "OpusparcusPC (fr)": 89.4, + "PawsXPairClassification (fr)": 55.35, + "SprintDuplicateQuestions": 91.23, + "TwitterSemEval2015": 78.25, + "TwitterURLCorpus": 86.05 } ] }, "Reranking": { "map": [ { - "Model": "st-polish-paraphrase-from-mpnet" + "Model": "sentence-t5-base", + "AlloprofReranking": 50.12, + "AskUbuntuDupQuestions": 59.73, + "MindSmallReranking": 30.2, + "SciDocsRR": 73.96, + "StackOverflowDupQuestions": 48.46, + "SyntecReranking": 78.05 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "st-polish-paraphrase-from-mpnet", - "ArguAna-PL": 51.87, - "DBPedia-PL": 24.59, - "FiQA-PL": 22.27, - "HotpotQA-PL": 32.11, - "MSMARCO-PL": 17.91, - "NFCorpus-PL": 24.05, - "NQ-PL": 23.54, - "Quora-PL": 81.49, - "SCIDOCS-PL": 13.23, - "SciFact-PL": 52.51, - "TRECCOVID-PL": 35.23 + "Model": "sentence-t5-base", + "AlloprofRetrieval": 27.52, + "ArguAna": 44.85, + "BSARDRetrieval": 0.16, + "CQADupstackRetrieval": 35.23, + "ClimateFEVER": 10.37, + "DBPedia": 27.77, + "FEVER": 26.17, + "FiQA2018": 34.83, + "HotpotQA": 33.2, + "MSMARCO": 20.7, + "MintakaRetrieval (fr)": 21.04, + "NFCorpus": 28.65, + "NQ": 36.32, + "QuoraRetrieval": 85.49, + "SCIDOCS": 14.15, + "SciFact": 45.76, + "SyntecRetrieval": 67.0, + "TRECCOVID": 40.7, + "Touche2020": 20.3, + "XPQARetrieval (fr)": 45.19 } ] }, "STS": { "spearman": [ { - "Model": "st-polish-paraphrase-from-mpnet", - "CDSC-R": 88.55, - "SICK-R-PL": 76.18, - "STS22 (pl)": 37.34 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "st-polish-paraphrase-from-mpnet" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "st-polish-paraphrase-from-mpnet" - } - ] - } - }, - "USER-bge-m3": { - "BitextMining": { - "f1": [ - { - "Model": "USER-bge-m3", - "Tatoeba (rus-Cyrl_eng-Latn)": 93.52 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "USER-bge-m3", - "GeoreviewClassification (rus-Cyrl)": 50.98, - "HeadlineClassification (rus-Cyrl)": 70.09, - "InappropriatenessClassification (rus-Cyrl)": 60.76, - "KinopoiskClassification (rus-Cyrl)": 63.33, - "MassiveIntentClassification (rus-Cyrl)": 68.85, - "MassiveScenarioClassification (rus-Cyrl)": 72.9, - "RuReviewsClassification (rus-Cyrl)": 68.52, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 57.67, - "RuSciBenchOECDClassification (rus-Cyrl)": 44.2 + "Model": "sentence-t5-base", + "BIOSSES": 75.89, + "SICK-R": 80.18, + "SICKFr": 71.74, + "STS12": 78.05, + "STS13": 85.85, + "STS14": 82.19, + "STS15": 87.46, + "STS16": 84.03, + "STS17 (ar-ar)": 13.36, + "STS17 (en-ar)": -5.65, + "STS17 (en-de)": 67.11, + "STS17 (en-en)": 89.57, + "STS17 (en-tr)": -0.02, + "STS17 (es-en)": 47.72, + "STS17 (es-es)": 79.94, + "STS17 (fr-en)": 56.61, + "STS17 (it-en)": 30.46, + "STS17 (ko-ko)": 10.06, + "STS17 (nl-en)": 36.46, + "STS22 (ar)": 31.2, + "STS22 (de)": 42.08, + "STS22 (de-en)": 46.9, + "STS22 (de-fr)": 55.04, + "STS22 (de-pl)": 33.94, + "STS22 (en)": 62.66, + "STS22 (es)": 53.81, + "STS22 (es-en)": 65.19, + "STS22 (es-it)": 55.29, + "STS22 (fr)": 77.69, + "STS22 (fr-pl)": 28.17, + "STS22 (it)": 60.65, + "STS22 (pl)": 24.42, + "STS22 (pl-en)": 42.97, + "STS22 (ru)": 12.13, + "STS22 (tr)": 40.45, + "STS22 (zh)": 32.9, + "STS22 
(zh-en)": 20.15, + "STSBenchmark": 85.52, + "STSBenchmarkMultilingualSTS (fr)": 74.04 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "sentence-t5-base", + "SummEval": 31.39, + "SummEvalFr": 30.01 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "sentence-t5-base" + } + ] + } + }, + "nomic-embed-text-v1.5-256": { + "BitextMining": { + "f1": [ + { + "Model": "nomic-embed-text-v1.5-256" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "nomic-embed-text-v1.5-256", + "AmazonCounterfactualClassification (en)": 72.94, + "AmazonPolarityClassification": 91.35, + "AmazonReviewsClassification (en)": 45.73, + "Banking77Classification": 83.69, + "EmotionClassification": 45.88, + "ImdbClassification": 83.99, + "MTOPDomainClassification (en)": 91.68, + "MTOPIntentClassification (en)": 72.47, + "MassiveIntentClassification (en)": 71.76, + "MassiveScenarioClassification (en)": 75.67, + "ToxicConversationsClassification": 70.87, + "TweetSentimentExtractionClassification": 59.2 } ] }, "Clustering": { "v_measure": [ { - "Model": "USER-bge-m3", - "GeoreviewClusteringP2P (rus-Cyrl)": 62.79, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 53.11, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.93 + "Model": "nomic-embed-text-v1.5-256", + "ArxivClusteringP2P": 44.82, + "ArxivClusteringS2S": 35.32, + "BiorxivClusteringP2P": 38.19, + "BiorxivClusteringS2S": 31.83, + "MedrxivClusteringP2P": 34.08, + "MedrxivClusteringS2S": 30.98, + "RedditClustering": 54.92, + "RedditClusteringP2P": 60.23, + "StackExchangeClustering": 61.81, + "StackExchangeClusteringP2P": 34.03, + "TwentyNewsgroupsClustering": 48.56 } ] }, "PairClassification": { "ap": [ { - "Model": "USER-bge-m3", - "OpusparcusPC (rus-Cyrl)": 90.73, - "TERRa (rus-Cyrl)": 64.99 + "Model": "nomic-embed-text-v1.5-256", + "SprintDuplicateQuestions": 92.31, + "TwitterSemEval2015": 73.61, + "TwitterURLCorpus": 86.34 } ] }, "Reranking": { "map": [ { - "Model": "USER-bge-m3", - "RuBQReranking (rus-Cyrl)": 73.08 + "Model": "nomic-embed-text-v1.5-256", + "AskUbuntuDupQuestions": 61.34, + "MindSmallReranking": 30.04, + "SciDocsRR": 79.4, + "StackOverflowDupQuestions": 49.95 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "USER-bge-m3", - "RiaNewsRetrieval (rus-Cyrl)": 83.53, - "RuBQRetrieval (rus-Cyrl)": 70.03 + "Model": "nomic-embed-text-v1.5-256", + "ArguAna": 45.44, + "CQADupstackRetrieval": 37.61, + "ClimateFEVER": 39.63, + "DBPedia": 39.42, + "FEVER": 84.4, + "FiQA2018": 35.0, + "HotpotQA": 67.78, + "MSMARCO": 41.38, + "NFCorpus": 32.54, + "NQ": 57.1, + "QuoraRetrieval": 87.65, + "SCIDOCS": 16.76, + "SciFact": 68.24, + "TRECCOVID": 80.65, + "Touche2020": 28.49 } ] }, "STS": { "spearman": [ { - "Model": "USER-bge-m3", - "RUParaPhraserSTS (rus-Cyrl)": 76.36, - "RuSTSBenchmarkSTS (rus-Cyrl)": 83.35, - "STS22 (rus-Cyrl)": 66.42, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.96 + "Model": "nomic-embed-text-v1.5-256", + "BIOSSES": 81.58, + "SICK-R": 79.24, + "STS12": 78.16, + "STS13": 86.01, + "STS14": 81.25, + "STS15": 86.51, + "STS16": 84.24, + "STS17 (en-en)": 86.44, + "STS22 (en)": 65.14, + "STSBenchmark": 84.8 } ] }, "Summarization": { "spearman": [ { - "Model": "USER-bge-m3" + "Model": "nomic-embed-text-v1.5-256", + "SummEval": 30.05 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "USER-bge-m3" + "Model": "nomic-embed-text-v1.5-256" } ] } }, - "Cohere-embed-multilingual-v3.0": { + "text-embedding-3-small-instruct": { "BitextMining": { "f1": [ { - "Model": "Cohere-embed-multilingual-v3.0" + "Model": 
"text-embedding-3-small-instruct" } ] }, "Classification": { "accuracy": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "AmazonReviewsClassification (fr)": 41.89, - "MTOPDomainClassification (fr)": 86.23, - "MTOPIntentClassification (fr)": 61.07, - "MasakhaNEWSClassification (fra)": 83.06, - "MassiveIntentClassification (fr)": 62.94, - "MassiveScenarioClassification (fr)": 67.29 + "Model": "text-embedding-3-small-instruct" } ] }, "Clustering": { "v_measure": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "AlloProfClusteringP2P": 63.53, - "AlloProfClusteringS2S": 36.18, - "HALClusteringS2S": 19.9, - "MLSUMClusteringP2P": 45.08, - "MLSUMClusteringS2S": 34.75, - "MasakhaNEWSClusteringP2P (fra)": 53.18, - "MasakhaNEWSClusteringS2S (fra)": 32.31 + "Model": "text-embedding-3-small-instruct" } ] }, "PairClassification": { "ap": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "OpusparcusPC (fr)": 94.08, - "PawsXPairClassification (fr)": 61.26 + "Model": "text-embedding-3-small-instruct" } ] }, "Reranking": { "map": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "AlloprofReranking": 51.01, - "SyntecReranking": 85.72 + "Model": "text-embedding-3-small-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "AlloprofRetrieval": 38.36, - "BSARDRetrieval": 0.14, - "MintakaRetrieval (fr)": 25.44, - "SyntecRetrieval": 79.27, - "XPQARetrieval (fr)": 58.87 + "Model": "text-embedding-3-small-instruct", + "ARCChallenge": 13.76, + "AlphaNLI": 21.14, + "HellaSwag": 27.2, + "PIQA": 29.59, + "Quail": 6.64, + "RARbCode": 72.14, + "RARbMath": 64.31, + "SIQA": 2.98, + "SpartQA": 3.58, + "TempReasonL1": 2.29, + "TempReasonL2Fact": 26.34, + "TempReasonL2Pure": 3.17, + "TempReasonL3Fact": 22.72, + "TempReasonL3Pure": 9.98, + "WinoGrande": 25.49 } ] }, "STS": { "spearman": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "SICKFr": 79.23, - "STS22 (fr)": 82.76, - "STSBenchmarkMultilingualSTS (fr)": 81.84 + "Model": "text-embedding-3-small-instruct" } ] }, "Summarization": { "spearman": [ { - "Model": "Cohere-embed-multilingual-v3.0", - "SummEvalFr": 31.26 + "Model": "text-embedding-3-small-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "Cohere-embed-multilingual-v3.0" + "Model": "text-embedding-3-small-instruct" } ] } }, - "multilingual-e5-large": { + "e5-large-v2": { "BitextMining": { "f1": [ { - "Model": "multilingual-e5-large", - "BornholmBitextMining (dan-Latn)": 29.61, - "BornholmBitextMining": 44.16, - "Tatoeba (tgl-Latn_eng-Latn)": 92.0, - "Tatoeba (gsw-Latn_eng-Latn)": 51.65, - "Tatoeba (tzl-Latn_eng-Latn)": 53.16, - "Tatoeba (slv-Latn_eng-Latn)": 89.57, - "Tatoeba (jav-Latn_eng-Latn)": 75.46, - "Tatoeba (uig-Arab_eng-Latn)": 72.17, - "Tatoeba (ind-Latn_eng-Latn)": 92.9, - "Tatoeba (rus-Cyrl_eng-Latn)": 92.32, - "Tatoeba (war-Latn_eng-Latn)": 62.02, - "Tatoeba (mar-Deva_eng-Latn)": 88.58, - "Tatoeba (mkd-Cyrl_eng-Latn)": 85.63, - "Tatoeba (jpn-Jpan_eng-Latn)": 95.28, - "Tatoeba (hun-Latn_eng-Latn)": 94.01, - "Tatoeba (slk-Latn_eng-Latn)": 93.13, - "Tatoeba (tha-Thai_eng-Latn)": 95.38, - "Tatoeba (fra-Latn_eng-Latn)": 93.42, - "Tatoeba (ukr-Cyrl_eng-Latn)": 93.32, - "Tatoeba (kat-Geor_eng-Latn)": 84.09, - "Tatoeba (nov-Latn_eng-Latn)": 71.62, - "Tatoeba (kor-Hang_eng-Latn)": 90.65, - "Tatoeba (ben-Beng_eng-Latn)": 83.02, - "Tatoeba (cor-Latn_eng-Latn)": 6.28, - "Tatoeba (lfn-Latn_eng-Latn)": 62.91, - "Tatoeba (swh-Latn_eng-Latn)": 71.61, - "Tatoeba (tur-Latn_eng-Latn)": 96.27, - "Tatoeba (cbk-Latn_eng-Latn)": 69.26, - "Tatoeba 
(kur-Latn_eng-Latn)": 66.83, - "Tatoeba (arq-Arab_eng-Latn)": 41.56, - "Tatoeba (ceb-Latn_eng-Latn)": 55.31, - "Tatoeba (max-Deva_eng-Latn)": 63.41, - "Tatoeba (ang-Latn_eng-Latn)": 40.18, - "Tatoeba (nds-Latn_eng-Latn)": 69.28, - "Tatoeba (epo-Latn_eng-Latn)": 96.01, - "Tatoeba (heb-Hebr_eng-Latn)": 86.61, - "Tatoeba (yue-Hant_eng-Latn)": 88.71, - "Tatoeba (dan-Latn_eng-Latn)": 95.08, - "Tatoeba (swe-Latn_eng-Latn)": 95.3, - "Tatoeba (lvs-Latn_eng-Latn)": 90.06, - "Tatoeba (ast-Latn_eng-Latn)": 81.76, - "Tatoeba (dsb-Latn_eng-Latn)": 48.44, - "Tatoeba (pes-Arab_eng-Latn)": 92.14, - "Tatoeba (dtp-Latn_eng-Latn)": 7.03, - "Tatoeba (tuk-Latn_eng-Latn)": 33.15, - "Tatoeba (isl-Latn_eng-Latn)": 92.09, - "Tatoeba (khm-Khmr_eng-Latn)": 59.96, - "Tatoeba (pam-Latn_eng-Latn)": 9.32, - "Tatoeba (tat-Cyrl_eng-Latn)": 73.51, - "Tatoeba (bos-Latn_eng-Latn)": 92.86, - "Tatoeba (spa-Latn_eng-Latn)": 97.1, - "Tatoeba (kaz-Cyrl_eng-Latn)": 79.67, - "Tatoeba (bel-Cyrl_eng-Latn)": 91.08, - "Tatoeba (zsm-Latn_eng-Latn)": 94.53, - "Tatoeba (cat-Latn_eng-Latn)": 91.03, - "Tatoeba (urd-Arab_eng-Latn)": 89.21, - "Tatoeba (mon-Cyrl_eng-Latn)": 87.53, - "Tatoeba (tam-Taml_eng-Latn)": 88.23, - "Tatoeba (fry-Latn_eng-Latn)": 63.43, - "Tatoeba (nob-Latn_eng-Latn)": 97.2, - "Tatoeba (tel-Telu_eng-Latn)": 91.34, - "Tatoeba (hye-Armn_eng-Latn)": 90.92, - "Tatoeba (awa-Deva_eng-Latn)": 72.27, - "Tatoeba (hrv-Latn_eng-Latn)": 96.15, - "Tatoeba (ile-Latn_eng-Latn)": 79.16, - "Tatoeba (amh-Ethi_eng-Latn)": 80.69, - "Tatoeba (orv-Cyrl_eng-Latn)": 39.87, - "Tatoeba (ara-Arab_eng-Latn)": 85.48, - "Tatoeba (ido-Latn_eng-Latn)": 83.52, - "Tatoeba (hin-Deva_eng-Latn)": 94.48, - "Tatoeba (por-Latn_eng-Latn)": 93.63, - "Tatoeba (ron-Latn_eng-Latn)": 94.87, - "Tatoeba (swg-Latn_eng-Latn)": 55.64, - "Tatoeba (cmn-Hans_eng-Latn)": 95.28, - "Tatoeba (pol-Latn_eng-Latn)": 96.6, - "Tatoeba (bul-Cyrl_eng-Latn)": 92.93, - "Tatoeba (ina-Latn_eng-Latn)": 93.47, - "Tatoeba (bre-Latn_eng-Latn)": 11.1, - "Tatoeba (wuu-Hans_eng-Latn)": 86.37, - "Tatoeba (lit-Latn_eng-Latn)": 88.48, - "Tatoeba (csb-Latn_eng-Latn)": 36.98, - "Tatoeba (lat-Latn_eng-Latn)": 53.37, - "Tatoeba (gle-Latn_eng-Latn)": 71.48, - "Tatoeba (ita-Latn_eng-Latn)": 93.29, - "Tatoeba (srp-Cyrl_eng-Latn)": 93.1, - "Tatoeba (arz-Arab_eng-Latn)": 74.73, - "Tatoeba (cym-Latn_eng-Latn)": 76.21, - "Tatoeba (ber-Tfng_eng-Latn)": 38.9, - "Tatoeba (xho-Latn_eng-Latn)": 80.87, - "Tatoeba (uzb-Latn_eng-Latn)": 72.35, - "Tatoeba (pms-Latn_eng-Latn)": 59.85, - "Tatoeba (est-Latn_eng-Latn)": 85.03, - "Tatoeba (deu-Latn_eng-Latn)": 99.07, - "Tatoeba (yid-Hebr_eng-Latn)": 76.33, - "Tatoeba (ell-Grek_eng-Latn)": 93.88, - "Tatoeba (afr-Latn_eng-Latn)": 90.22, - "Tatoeba (fao-Latn_eng-Latn)": 72.62, - "Tatoeba (nld-Latn_eng-Latn)": 96.63, - "Tatoeba (hsb-Latn_eng-Latn)": 58.9, - "Tatoeba (aze-Latn_eng-Latn)": 87.61, - "Tatoeba (kzj-Latn_eng-Latn)": 7.91, - "Tatoeba (kab-Latn_eng-Latn)": 36.54, - "Tatoeba (mal-Mlym_eng-Latn)": 97.7, - "Tatoeba (mhr-Cyrl_eng-Latn)": 6.79, - "Tatoeba (ces-Latn_eng-Latn)": 94.89, - "Tatoeba (gla-Latn_eng-Latn)": 59.0, - "Tatoeba (cha-Latn_eng-Latn)": 27.16, - "Tatoeba (glg-Latn_eng-Latn)": 93.34, - "Tatoeba (vie-Latn_eng-Latn)": 97.0, - "Tatoeba (oci-Latn_eng-Latn)": 54.91, - "Tatoeba (nno-Latn_eng-Latn)": 91.4, - "Tatoeba (fin-Latn_eng-Latn)": 95.44, - "Tatoeba (eus-Latn_eng-Latn)": 77.82, - "Tatoeba (sqi-Latn_eng-Latn)": 94.7 + "Model": "e5-large-v2" } ] }, "Classification": { "accuracy": [ { - "Model": "multilingual-e5-large", - "AllegroReviews (pol-Latn)": 41.04, - 
"AllegroReviews": 41.14, - "AmazonCounterfactualClassification (en-ext)": 78.73, - "AmazonCounterfactualClassification (en)": 78.67, - "AmazonCounterfactualClassification (deu-Latn)": 68.66, - "AmazonCounterfactualClassification (jpn-Jpan)": 78.8, - "AmazonPolarityClassification": 93.26, - "AmazonReviewsClassification (en)": 49.2, - "AmazonReviewsClassification (deu-Latn)": 46.5, - "AmazonReviewsClassification (spa-Latn)": 44.35, - "AmazonReviewsClassification (fra-Latn)": 42.55, - "AmazonReviewsClassification (jpn-Jpan)": 41.71, - "AmazonReviewsClassification (cmn-Hans)": 38.87, - "AmazonReviewsClassification (fr)": 41.91, - "AngryTweetsClassification (dan-Latn)": 57.69, - "AngryTweetsClassification": 54.95, - "Banking77Classification": 75.88, - "CBD (pol-Latn)": 69.84, - "CBD": 69.9, - "DKHateClassification": 66.02, - "DanishPoliticalCommentsClassification (dan-Latn)": 39.43, - "DanishPoliticalCommentsClassification": 38.27, - "EmotionClassification": 47.58, - "GeoreviewClassification (rus-Cyrl)": 49.69, - "HeadlineClassification (rus-Cyrl)": 77.19, - "IFlyTek (cmn-Hans)": 41.86, - "IFlyTek": 45.47, - "ImdbClassification": 90.23, - "InappropriatenessClassification (rus-Cyrl)": 61.6, - "JDReview (cmn-Hans)": 80.54, - "JDReview": 80.99, - "KinopoiskClassification (rus-Cyrl)": 56.59, - "LccSentimentClassification (dan-Latn)": 61.53, - "LccSentimentClassification": 59.6, - "MTOPDomainClassification (en)": 91.81, - "MTOPDomainClassification (deu-Latn)": 90.44, - "MTOPDomainClassification (spa-Latn)": 88.34, - "MTOPDomainClassification (fra-Latn)": 86.23, - "MTOPDomainClassification (hin-Deva)": 86.84, - "MTOPDomainClassification (tha-Thai)": 86.88, - "MTOPDomainClassification (fr)": 86.41, - "MTOPIntentClassification (en)": 64.29, - "MTOPIntentClassification (deu-Latn)": 65.97, - "MTOPIntentClassification (spa-Latn)": 61.9, - "MTOPIntentClassification (fra-Latn)": 56.25, - "MTOPIntentClassification (hin-Deva)": 59.17, - "MTOPIntentClassification (tha-Thai)": 62.59, - "MTOPIntentClassification (fr)": 59.43, - "MasakhaNEWSClassification (amh-Ethi)": 83.7, - "MasakhaNEWSClassification (eng)": 78.26, - "MasakhaNEWSClassification (fra-Latn)": 76.11, - "MasakhaNEWSClassification (hau-Latn)": 76.17, - "MasakhaNEWSClassification (ibo-Latn)": 70.05, - "MasakhaNEWSClassification (lin-Latn)": 75.89, - "MasakhaNEWSClassification (lug-Latn)": 73.63, - "MasakhaNEWSClassification (orm-Ethi)": 80.31, - "MasakhaNEWSClassification (pcm-Latn)": 89.15, - "MasakhaNEWSClassification (run-Latn)": 76.55, - "MasakhaNEWSClassification (sna-Latn)": 86.99, - "MasakhaNEWSClassification (som-Latn)": 64.63, - "MasakhaNEWSClassification (swa-Latn)": 73.42, - "MasakhaNEWSClassification (tir-Ethi)": 72.06, - "MasakhaNEWSClassification (xho-Latn)": 82.56, - "MasakhaNEWSClassification (yor-Latn)": 81.09, - "MasakhaNEWSClassification (fra)": 79.38, - "MassiveIntentClassification (kor-Kore)": 63.92, - "MassiveIntentClassification (lav-Latn)": 58.31, - "MassiveIntentClassification (isl-Latn)": 53.3, - "MassiveIntentClassification (tel-Telu)": 53.96, - "MassiveIntentClassification (mya-Mymr)": 49.73, - "MassiveIntentClassification (nob-Latn)": 64.54, - "MassiveIntentClassification (en)": 68.51, - "MassiveIntentClassification (spa-Latn)": 64.01, - "MassiveIntentClassification (swe-Latn)": 66.52, - "MassiveIntentClassification (cmo-Hant)": 58.78, - "MassiveIntentClassification (pol-Latn)": 65.09, - "MassiveIntentClassification (rus-Cyrl)": 65.76, - "MassiveIntentClassification (aze-Latn)": 54.68, - "MassiveIntentClassification 
(fin-Latn)": 64.28, - "MassiveIntentClassification (cmo-Hans)": 66.23, - "MassiveIntentClassification (urd-Arab)": 54.6, - "MassiveIntentClassification (tam-Taml)": 53.41, - "MassiveIntentClassification (hin-Deva)": 60.93, - "MassiveIntentClassification (deu-Latn)": 63.82, - "MassiveIntentClassification (ell-Grek)": 64.34, - "MassiveIntentClassification (hye-Armn)": 50.89, - "MassiveIntentClassification (por-Latn)": 65.6, - "MassiveIntentClassification (nld-Latn)": 65.0, - "MassiveIntentClassification (fas-Arab)": 63.74, - "MassiveIntentClassification (ron-Latn)": 59.76, - "MassiveIntentClassification (slv-Latn)": 59.38, - "MassiveIntentClassification (heb-Hebr)": 62.44, - "MassiveIntentClassification (vie-Latn)": 63.39, - "MassiveIntentClassification (sqi-Latn)": 57.3, - "MassiveIntentClassification (khm-Khmr)": 34.88, - "MassiveIntentClassification (ben-Beng)": 55.6, - "MassiveIntentClassification (tgl-Latn)": 54.77, - "MassiveIntentClassification (jpn-Jpan)": 67.11, - "MassiveIntentClassification (kat-Geor)": 41.45, - "MassiveIntentClassification (afr-Latn)": 53.69, - "MassiveIntentClassification (cym-Latn)": 44.22, - "MassiveIntentClassification (amh-Ethi)": 45.48, - "MassiveIntentClassification (ita-Latn)": 63.89, - "MassiveIntentClassification (mal-Mlym)": 57.58, - "MassiveIntentClassification (tha-Thai)": 62.75, - "MassiveIntentClassification (ind-Latn)": 63.51, - "MassiveIntentClassification (jav-Latn)": 48.96, - "MassiveIntentClassification (dan-Latn)": 63.7, - "MassiveIntentClassification (ara-Arab)": 54.1, - "MassiveIntentClassification (kan-Knda)": 53.45, - "MassiveIntentClassification (hun-Latn)": 64.0, - "MassiveIntentClassification (tur-Latn)": 64.61, - "MassiveIntentClassification (msa-Latn)": 58.49, - "MassiveIntentClassification (mon-Cyrl)": 49.6, - "MassiveIntentClassification (swa-Latn)": 47.69, - "MassiveIntentClassification (fra-Latn)": 63.37, - "MassiveIntentClassification (da)": 60.16, - "MassiveIntentClassification (nb)": 59.83, - "MassiveIntentClassification (sv)": 61.78, - "MassiveIntentClassification (pl)": 65.07, - "MassiveScenarioClassification (heb-Hebr)": 67.72, - "MassiveScenarioClassification (vie-Latn)": 68.91, - "MassiveScenarioClassification (cmo-Hant)": 64.35, - "MassiveScenarioClassification (urd-Arab)": 60.89, - "MassiveScenarioClassification (isl-Latn)": 60.74, - "MassiveScenarioClassification (ell-Grek)": 69.74, - "MassiveScenarioClassification (mon-Cyrl)": 55.37, - "MassiveScenarioClassification (swa-Latn)": 56.27, - "MassiveScenarioClassification (tam-Taml)": 58.76, - "MassiveScenarioClassification (hye-Armn)": 55.76, - "MassiveScenarioClassification (amh-Ethi)": 52.69, - "MassiveScenarioClassification (ben-Beng)": 61.85, - "MassiveScenarioClassification (tel-Telu)": 59.49, - "MassiveScenarioClassification (dan-Latn)": 71.18, - "MassiveScenarioClassification (slv-Latn)": 65.33, - "MassiveScenarioClassification (en)": 73.04, - "MassiveScenarioClassification (rus-Cyrl)": 70.85, - "MassiveScenarioClassification (mal-Mlym)": 63.17, - "MassiveScenarioClassification (sqi-Latn)": 63.79, - "MassiveScenarioClassification (ita-Latn)": 69.45, - "MassiveScenarioClassification (kor-Kore)": 70.54, - "MassiveScenarioClassification (cmo-Hans)": 72.25, - "MassiveScenarioClassification (cym-Latn)": 51.25, - "MassiveScenarioClassification (pol-Latn)": 69.83, - "MassiveScenarioClassification (ind-Latn)": 69.43, - "MassiveScenarioClassification (tur-Latn)": 68.12, - "MassiveScenarioClassification (tgl-Latn)": 60.71, - "MassiveScenarioClassification (hin-Deva)": 
66.85, - "MassiveScenarioClassification (spa-Latn)": 69.07, - "MassiveScenarioClassification (lav-Latn)": 64.28, - "MassiveScenarioClassification (mya-Mymr)": 54.03, - "MassiveScenarioClassification (ara-Arab)": 61.0, - "MassiveScenarioClassification (kan-Knda)": 59.36, - "MassiveScenarioClassification (jav-Latn)": 56.24, - "MassiveScenarioClassification (por-Latn)": 68.33, - "MassiveScenarioClassification (tha-Thai)": 69.06, - "MassiveScenarioClassification (aze-Latn)": 58.49, - "MassiveScenarioClassification (fra-Latn)": 68.74, - "MassiveScenarioClassification (ron-Latn)": 66.06, - "MassiveScenarioClassification (nld-Latn)": 71.11, - "MassiveScenarioClassification (fas-Arab)": 67.55, - "MassiveScenarioClassification (deu-Latn)": 71.25, - "MassiveScenarioClassification (nob-Latn)": 70.44, - "MassiveScenarioClassification (msa-Latn)": 63.55, - "MassiveScenarioClassification (afr-Latn)": 62.35, - "MassiveScenarioClassification (hun-Latn)": 70.53, - "MassiveScenarioClassification (swe-Latn)": 72.77, - "MassiveScenarioClassification (kat-Geor)": 47.82, - "MassiveScenarioClassification (jpn-Jpan)": 73.16, - "MassiveScenarioClassification (khm-Khmr)": 41.14, - "MassiveScenarioClassification (fin-Latn)": 68.62, - "MassiveScenarioClassification (da)": 67.46, - "MassiveScenarioClassification (nb)": 66.18, - "MassiveScenarioClassification (sv)": 69.15, - "MassiveScenarioClassification (pl)": 69.82, - "MultilingualSentiment (cmn-Hans)": 70.81, - "MultilingualSentiment": 68.58, - "NoRecClassification (nob-Latn)": 58.43, - "NoRecClassification": 62.76, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 80.15, - "NordicLangClassification": 82.29, - "NorwegianParliament": 60.36, - "OnlineShopping (cmn-Hans)": 90.45, - "OnlineShopping": 90.81, - "PAC (pol-Latn)": 70.33, - "PAC": 70.37, - "PolEmo2.0-IN (pol-Latn)": 77.06, - "PolEmo2.0-IN": 77.06, - "PolEmo2.0-OUT (pol-Latn)": 53.48, - "PolEmo2.0-OUT": 53.38, - "RuReviewsClassification (rus-Cyrl)": 65.28, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.2, - "RuSciBenchOECDClassification (rus-Cyrl)": 43.91, - "ScalaDaClassification": 50.77, - "ScalaNbClassification": 50.44, - "TNews (cmn-Hans)": 48.8, - "TNews": 48.38, - "ToxicConversationsClassification": 66.01, - "TweetSentimentExtractionClassification": 62.8, - "Waimai (cmn-Hans)": 86.3, - "Waimai": 85.02 + "Model": "e5-large-v2" } ] }, "Clustering": { "v_measure": [ { - "Model": "multilingual-e5-large", - "8TagsClustering": 33.88, - "AlloProfClusteringP2P": 62.99, - "AlloProfClusteringS2S": 32.26, - "BiorxivClusteringP2P": 35.5, - "BiorxivClusteringS2S": 33.3, - "CLSClusteringP2P": 40.68, - "CLSClusteringS2S": 38.59, - "GeoreviewClusteringP2P (rus-Cyrl)": 60.51, - "HALClusteringS2S": 22.44, - "MLSUMClusteringP2P (rus-Cyrl)": 42.79, - "MLSUMClusteringP2P": 44.04, - "MLSUMClusteringS2S (rus-Cyrl)": 44.32, - "MLSUMClusteringS2S": 37.65, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.16, - "MasakhaNEWSClusteringP2P (eng)": 61.1, - "MasakhaNEWSClusteringP2P (fra-Latn)": 41.66, - "MasakhaNEWSClusteringP2P (hau-Latn)": 60.7, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 48.41, - "MasakhaNEWSClusteringP2P (lin-Latn)": 57.69, - "MasakhaNEWSClusteringP2P (lug-Latn)": 71.95, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 60.14, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 80.84, - "MasakhaNEWSClusteringP2P (run-Latn)": 59.91, - "MasakhaNEWSClusteringP2P (sna-Latn)": 53.3, - "MasakhaNEWSClusteringP2P (som-Latn)": 34.38, - "MasakhaNEWSClusteringP2P (swa-Latn)": 33.25, - "MasakhaNEWSClusteringP2P 
(tir-Ethi)": 54.21, - "MasakhaNEWSClusteringP2P (xho-Latn)": 41.12, - "MasakhaNEWSClusteringP2P (yor-Latn)": 36.22, - "MasakhaNEWSClusteringP2P (fra)": 40.94, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 47.24, - "MasakhaNEWSClusteringS2S (eng)": 53.93, - "MasakhaNEWSClusteringS2S (fra-Latn)": 39.84, - "MasakhaNEWSClusteringS2S (hau-Latn)": 19.24, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 28.88, - "MasakhaNEWSClusteringS2S (lin-Latn)": 42.22, - "MasakhaNEWSClusteringS2S (lug-Latn)": 43.63, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.29, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 59.77, - "MasakhaNEWSClusteringS2S (run-Latn)": 51.46, - "MasakhaNEWSClusteringS2S (sna-Latn)": 48.14, - "MasakhaNEWSClusteringS2S (som-Latn)": 25.14, - "MasakhaNEWSClusteringS2S (swa-Latn)": 7.28, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51, - "MasakhaNEWSClusteringS2S (xho-Latn)": 30.98, - "MasakhaNEWSClusteringS2S (yor-Latn)": 34.09, - "MasakhaNEWSClusteringS2S (fra)": 30.56, - "MedrxivClusteringP2P": 31.7, - "MedrxivClusteringS2S": 29.76, - "RedditClustering": 46.91, - "RedditClusteringP2P": 63.0, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.03, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.11, - "StackExchangeClustering": 58.37, - "StackExchangeClusteringP2P": 32.9, - "ThuNewsClusteringP2P": 58.05, - "ThuNewsClusteringS2S": 55.59, - "TwentyNewsgroupsClustering": 39.4 + "Model": "e5-large-v2", + "BiorxivClusteringP2P": 36.72, + "BiorxivClusteringS2S": 35.47, + "MedrxivClusteringP2P": 31.45, + "MedrxivClusteringS2S": 29.91, + "RedditClustering": 55.5, + "RedditClusteringP2P": 63.71, + "StackExchangeClustering": 65.23, + "StackExchangeClusteringP2P": 33.62, + "TwentyNewsgroupsClustering": 48.73 } ] }, "PairClassification": { "ap": [ { - "Model": "multilingual-e5-large", - "CDSC-E (pol-Latn)": 74.47, - "CDSC-E": 74.47, - "Cmnli": 78.18, - "Ocnli": 61.6, - "OpusparcusPC (deu-Latn)": 97.27, - "OpusparcusPC (en)": 98.74, - "OpusparcusPC (fin-Latn)": 94.26, - "OpusparcusPC (fra-Latn)": 93.68, - "OpusparcusPC (rus-Cyrl)": 89.64, - "OpusparcusPC (swe-Latn)": 94.98, - "OpusparcusPC (fr)": 93.89, - "PPC": 92.18, - "PSC (pol-Latn)": 99.4, - "PSC": 99.39, - "PawsXPairClassification (deu-Latn)": 56.81, - "PawsXPairClassification (en)": 62.97, - "PawsXPairClassification (spa-Latn)": 56.85, - "PawsXPairClassification (fra-Latn)": 58.68, - "PawsXPairClassification (jpn-Hira)": 50.7, - "PawsXPairClassification (kor-Hang)": 52.08, - "PawsXPairClassification (cmn-Hans)": 56.82, - "PawsXPairClassification (fr)": 58.5, - "SICK-E-PL (pol-Latn)": 75.95, - "SICK-E-PL": 75.96, - "SprintDuplicateQuestions": 93.14, - "TERRa (rus-Cyrl)": 58.4, - "TwitterSemEval2015": 75.28, - "TwitterURLCorpus": 85.83 + "Model": "e5-large-v2" } ] }, "Reranking": { "map": [ { - "Model": "multilingual-e5-large", - "AlloprofReranking (fra-Latn)": 69.44, - "AlloprofReranking": 57.37, - "AskUbuntuDupQuestions": 59.24, - "CMedQAv1": 68.25, - "CMedQAv2": 68.56, - "MMarcoReranking (cmn-Hans)": 29.12, - "MMarcoReranking": 21.34, - "MindSmallReranking": 30.24, - "RuBQReranking (rus-Cyrl)": 75.58, - "SciDocsRR": 84.22, - "StackOverflowDupQuestions": 50.14, - "SyntecReranking (fra-Latn)": 85.45, - "SyntecReranking": 86.9, - "T2Reranking (cmn-Hans)": 66.32, - "T2Reranking": 65.83 + "Model": "e5-large-v2" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "multilingual-e5-large", - "AILACasedocs": 26.43, - "AILAStatutes": 20.84, - "ARCChallenge": 10.83, - "AlloprofRetrieval (fra-Latn)": 39.34, - "AlloprofRetrieval": 38.15, - "AlphaNLI": 13.59, - "ArguAna": 54.36, - 
"ArguAna-PL (pol-Latn)": 52.99, - "ArguAna-PL": 53.02, - "BSARDRetrieval (fra-Latn)": 21.28, - "BSARDRetrieval": 0.27, - "CmedqaRetrieval (cmn-Hans)": 28.66, - "CmedqaRetrieval": 28.67, - "CovidRetrieval (cmn-Hans)": 75.61, - "CovidRetrieval": 75.51, - "DBPedia-PL": 35.82, - "DuRetrieval (cmn-Hans)": 85.3, - "DuRetrieval": 85.32, - "EcomRetrieval (cmn-Hans)": 54.67, - "EcomRetrieval": 54.75, - "FiQA-PL (pol-Latn)": 32.97, - "FiQA-PL": 33.0, - "FiQA2018": 43.81, - "GerDaLIRSmall (deu-Latn)": 15.72, - "HellaSwag": 27.35, - "HotpotQA-PL": 67.41, - "LEMBNarrativeQARetrieval": 24.22, - "LEMBNeedleRetrieval": 28.0, - "LEMBPasskeyRetrieval": 38.25, - "LEMBQMSumRetrieval": 24.26, - "LEMBSummScreenFDRetrieval": 71.12, - "LEMBWikimQARetrieval": 56.8, - "LeCaRDv2 (zho-Hans)": 55.83, - "LegalBenchConsumerContractsQA": 73.3, - "LegalBenchCorporateLobbying": 89.72, - "LegalQuAD (deu-Latn)": 43.17, - "LegalSummarization": 62.1, - "MMarcoRetrieval (cmn-Hans)": 79.2, - "MMarcoRetrieval": 79.2, - "MSMARCO-PL": 33.38, - "MedicalRetrieval (cmn-Hans)": 51.44, - "MedicalRetrieval": 51.44, - "MintakaRetrieval (ara-Arab)": 26.5, - "MintakaRetrieval (deu-Latn)": 32.77, - "MintakaRetrieval (spa-Latn)": 34.23, - "MintakaRetrieval (fra-Latn)": 34.24, - "MintakaRetrieval (hin-Deva)": 27.45, - "MintakaRetrieval (ita-Latn)": 33.84, - "MintakaRetrieval (jpn-Hira)": 26.45, - "MintakaRetrieval (por-Latn)": 35.9, - "MintakaRetrieval (fr)": 25.2, - "NFCorpus": 33.95, - "NFCorpus-PL (pol-Latn)": 30.21, - "NFCorpus-PL": 30.24, - "NQ-PL": 52.79, - "PIQA": 28.82, - "Quail": 4.85, - "Quora-PL": 83.65, - "RARbCode": 58.92, - "RARbMath": 67.32, - "RiaNewsRetrieval (rus-Cyrl)": 80.67, - "RuBQRetrieval (rus-Cyrl)": 74.11, - "SCIDOCS": 17.45, - "SCIDOCS-PL (pol-Latn)": 13.82, - "SCIDOCS-PL": 13.81, - "SIQA": 5.36, - "SciFact": 70.42, - "SciFact-PL (pol-Latn)": 65.66, - "SciFact-PL": 65.66, - "SpartQA": 5.64, - "SyntecRetrieval (fra-Latn)": 82.39, - "SyntecRetrieval": 81.07, - "T2Retrieval (cmn-Hans)": 76.07, - "T2Retrieval": 76.11, - "TRECCOVID": 71.21, - "TRECCOVID-PL (pol-Latn)": 69.9, - "TRECCOVID-PL": 70.03, - "TempReasonL1": 1.14, - "TempReasonL2Fact": 42.97, - "TempReasonL2Pure": 2.05, - "TempReasonL3Fact": 38.22, - "TempReasonL3Pure": 8.31, - "Touche2020": 23.13, - "VideoRetrieval (cmn-Hans)": 58.28, - "VideoRetrieval": 58.25, - "WinoGrande": 54.99, - "XPQARetrieval (ara-Arab_ara-Arab)": 43.69, - "XPQARetrieval (eng-Latn_ara-Arab)": 30.86, - "XPQARetrieval (ara-Arab_eng-Latn)": 39.11, - "XPQARetrieval (deu-Latn_deu-Latn)": 76.83, - "XPQARetrieval (eng-Latn_deu-Latn)": 42.87, - "XPQARetrieval (deu-Latn_eng-Latn)": 68.25, - "XPQARetrieval (spa-Latn_spa-Latn)": 61.77, - "XPQARetrieval (eng-Latn_spa-Latn)": 37.55, - "XPQARetrieval (spa-Latn_eng-Latn)": 52.86, - "XPQARetrieval (fra-Latn_fra-Latn)": 61.38, - "XPQARetrieval (eng-Latn_fra-Latn)": 39.12, - "XPQARetrieval (fra-Latn_eng-Latn)": 57.93, - "XPQARetrieval (hin-Deva_hin-Deva)": 71.09, - "XPQARetrieval (eng-Latn_hin-Deva)": 32.39, - "XPQARetrieval (hin-Deva_eng-Latn)": 68.31, - "XPQARetrieval (ita-Latn_ita-Latn)": 74.32, - "XPQARetrieval (eng-Latn_ita-Latn)": 37.95, - "XPQARetrieval (ita-Latn_eng-Latn)": 64.54, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 74.11, - "XPQARetrieval (eng-Latn_jpn-Hira)": 38.31, - "XPQARetrieval (jpn-Hira_eng-Latn)": 65.42, - "XPQARetrieval (kor-Hang_kor-Hang)": 35.72, - "XPQARetrieval (eng-Latn_kor-Hang)": 31.09, - "XPQARetrieval (kor-Hang_eng-Latn)": 34.06, - "XPQARetrieval (pol-Latn_pol-Latn)": 51.01, - "XPQARetrieval (eng-Latn_pol-Latn)": 30.49, - 
"XPQARetrieval (pol-Latn_eng-Latn)": 44.66, - "XPQARetrieval (por-Latn_por-Latn)": 41.1, - "XPQARetrieval (eng-Latn_por-Latn)": 22.03, - "XPQARetrieval (por-Latn_eng-Latn)": 35.15, - "XPQARetrieval (tam-Taml_tam-Taml)": 39.51, - "XPQARetrieval (eng-Latn_tam-Taml)": 17.33, - "XPQARetrieval (tam-Taml_eng-Latn)": 33.67, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 66.27, - "XPQARetrieval (eng-Latn_cmn-Hans)": 26.24, - "XPQARetrieval (cmn-Hans_eng-Latn)": 55.15, - "XPQARetrieval (fr)": 66.15 + "Model": "e5-large-v2" } ] }, "STS": { "spearman": [ { - "Model": "multilingual-e5-large", - "AFQMC (cmn-Hans)": 33.01, - "AFQMC": 33.02, - "ATEC (cmn-Hans)": 39.8, - "ATEC": 39.81, - "BIOSSES": 82.49, - "BQ (cmn-Hans)": 46.44, - "BQ": 46.44, - "CDSC-R (pol-Latn)": 91.0, - "CDSC-R": 91.0, - "LCQMC (cmn-Hans)": 75.95, - "LCQMC": 75.95, - "PAWSX (cmn-Hans)": 14.63, - "PAWSX": 14.63, - "QBQTC": 29.77, - "RUParaPhraserSTS (rus-Cyrl)": 71.82, - "RuSTSBenchmarkSTS (rus-Cyrl)": 83.15, - "SICK-R": 80.23, - "SICK-R-PL (pol-Latn)": 75.08, - "SICK-R-PL": 75.08, - "SICKFr (fra-Latn)": 78.81, - "SICKFr": 78.78, - "STS12": 80.02, - "STS13": 81.55, - "STS14": 77.72, - "STS15": 89.31, - "STS16": 85.79, - "STS17 (en-en)": 88.12, - "STS17 (spa-Latn)": 86.71, - "STS17 (spa-Latn_eng-Latn)": 80.74, - "STS17 (eng-Latn_ara-Arab)": 75.03, - "STS17 (fra-Latn_eng-Latn)": 85.62, - "STS17 (kor-Hang)": 82.27, - "STS17 (ita-Latn_eng-Latn)": 84.52, - "STS17 (ara-Arab)": 77.83, - "STS17 (eng-Latn_tur-Latn)": 71.22, - "STS17 (eng-Latn_deu-Latn)": 86.15, - "STS17 (nld-Latn_eng-Latn)": 85.29, - "STS22 (spa-Latn)": 64.6, - "STS22 (spa-Latn_eng-Latn)": 72.51, - "STS22 (deu-Latn_eng-Latn)": 56.59, - "STS22 (cmn-Hans_eng-Latn)": 65.95, - "STS22 (deu-Latn_pol-Latn)": 49.58, - "STS22 (fra-Latn_pol-Latn)": 50.71, - "STS22 (en)": 63.66, - "STS22 (ara-Arab)": 56.95, - "STS22 (spa-Latn_ita-Latn)": 68.92, - "STS22 (tur-Latn)": 63.56, - "STS22 (deu-Latn_fra-Latn)": 67.96, - "STS22 (ita-Latn)": 76.99, - "STS22 (cmn-Hans)": 66.82, - "STS22 (rus-Cyrl)": 59.89, - "STS22 (fra-Latn)": 76.77, - "STS22 (pol-Latn_eng-Latn)": 65.54, - "STS22 (deu-Latn)": 56.58, - "STS22 (pol-Latn)": 34.65, - "STS22 (zh)": 65.64, - "STS22 (pl)": 34.66, - "STSB (cmn-Hans)": 81.08, - "STSB": 81.08, - "STSBenchmark": 87.29, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.22, - "STSBenchmarkMultilingualSTS (en)": 87.29, - "STSBenchmarkMultilingualSTS (pol-Latn)": 81.06, - "STSBenchmarkMultilingualSTS (nld-Latn)": 81.63, - "STSBenchmarkMultilingualSTS (ita-Latn)": 81.75, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 83.05, - "STSBenchmarkMultilingualSTS (por-Latn)": 73.31, - "STSBenchmarkMultilingualSTS (spa-Latn)": 83.81, - "STSBenchmarkMultilingualSTS (fra-Latn)": 83.28, - "STSBenchmarkMultilingualSTS (deu-Latn)": 84.27, - "STSBenchmarkMultilingualSTS (fr)": 82.53 + "Model": "e5-large-v2" } ] }, "Summarization": { "spearman": [ { - "Model": "multilingual-e5-large", - "SummEval": 29.65, - "SummEvalFr (fra-Latn)": 30.92, - "SummEvalFr": 30.92 + "Model": "e5-large-v2" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "multilingual-e5-large" + "Model": "e5-large-v2", + "Core17InstructionRetrieval": 0.12, + "News21InstructionRetrieval": 0.87, + "Robust04InstructionRetrieval": -4.16 } ] } }, - "text-embedding-3-large": { + "gbert-large": { "BitextMining": { "f1": [ { - "Model": "text-embedding-3-large" + "Model": "gbert-large" } ] }, "Classification": { "accuracy": [ { - "Model": "text-embedding-3-large", - "AmazonCounterfactualClassification (en)": 78.93, - 
"AmazonPolarityClassification": 92.85, - "AmazonReviewsClassification (en)": 48.7, - "Banking77Classification": 85.69, - "EmotionClassification": 51.58, - "ImdbClassification": 87.67, - "MTOPDomainClassification (en)": 95.36, - "MTOPIntentClassification (en)": 75.07, - "MassiveIntentClassification (en)": 74.64, - "MassiveScenarioClassification (en)": 79.79, - "ToxicConversationsClassification": 72.92, - "TweetSentimentExtractionClassification": 62.22 + "Model": "gbert-large" } ] }, "Clustering": { "v_measure": [ { - "Model": "text-embedding-3-large", - "ArxivClusteringP2P": 49.01, - "ArxivClusteringS2S": 44.45, - "BiorxivClusteringP2P": 38.03, - "BiorxivClusteringS2S": 36.53, - "MedrxivClusteringP2P": 32.7, - "MedrxivClusteringS2S": 31.27, - "RedditClustering": 67.84, - "RedditClusteringP2P": 67.96, - "StackExchangeClustering": 76.26, - "StackExchangeClusteringP2P": 36.88, - "TwentyNewsgroupsClustering": 58.14 + "Model": "gbert-large", + "BlurbsClusteringP2P": 39.3, + "BlurbsClusteringS2S": 13.38, + "TenKGnadClusteringP2P": 41.69, + "TenKGnadClusteringS2S": 34.97 } ] }, "PairClassification": { "ap": [ { - "Model": "text-embedding-3-large", - "SprintDuplicateQuestions": 92.25, - "TwitterSemEval2015": 77.13, - "TwitterURLCorpus": 87.78 + "Model": "gbert-large" } ] }, "Reranking": { "map": [ { - "Model": "text-embedding-3-large", - "AskUbuntuDupQuestions": 65.03, - "MindSmallReranking": 29.86, - "SciDocsRR": 86.66, - "StackOverflowDupQuestions": 55.08 + "Model": "gbert-large" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "text-embedding-3-large", - "AILACasedocs": 39.0, - "AILAStatutes": 41.31, - "ARCChallenge": 23.98, - "AlphaNLI": 37.27, - "ArguAna": 58.05, - "BrightRetrieval (theoremqa_questions)": 22.22, - "BrightRetrieval (leetcode)": 23.65, - "BrightRetrieval (earth_science)": 26.27, - "BrightRetrieval (psychology)": 27.52, - "BrightRetrieval (robotics)": 12.93, - "BrightRetrieval (economics)": 19.98, - "BrightRetrieval (stackoverflow)": 12.49, - "BrightRetrieval (biology)": 23.67, - "BrightRetrieval (theoremqa_theorems)": 9.25, - "BrightRetrieval (pony)": 2.45, - "BrightRetrieval (sustainable_living)": 20.32, - "BrightRetrieval (aops)": 8.45, - "CQADupstackRetrieval": 47.54, - "ClimateFEVER": 30.27, - "DBPedia": 44.76, - "FEVER": 87.94, - "FiQA2018": 55.0, - "GerDaLIRSmall": 32.77, - "HellaSwag": 34.12, - "HotpotQA": 71.58, - "LEMBNarrativeQARetrieval": 44.09, - "LEMBNeedleRetrieval": 29.25, - "LEMBPasskeyRetrieval": 63.0, - "LEMBQMSumRetrieval": 32.49, - "LEMBSummScreenFDRetrieval": 84.8, - "LEMBWikimQARetrieval": 54.16, - "LeCaRDv2": 57.2, - "LegalBenchConsumerContractsQA": 79.39, - "LegalBenchCorporateLobbying": 95.09, - "LegalQuAD": 57.47, - "LegalSummarization": 71.55, - "MSMARCO": 40.24, - "NFCorpus": 42.07, - "NQ": 61.27, - "PIQA": 41.96, - "Quail": 10.15, - "QuoraRetrieval": 89.05, - "RARbCode": 89.64, - "RARbMath": 90.08, - "SCIDOCS": 23.11, - "SIQA": 3.44, - "SciFact": 77.77, - "SpartQA": 7.51, - "TRECCOVID": 79.56, - "TempReasonL1": 2.13, - "TempReasonL2Fact": 28.65, - "TempReasonL2Pure": 10.34, - "TempReasonL3Fact": 25.52, - "TempReasonL3Pure": 15.28, - "Touche2020": 23.35, - "WinoGrande": 29.11 + "Model": "gbert-large" } ] }, "STS": { "spearman": [ { - "Model": "text-embedding-3-large", - "BIOSSES": 84.68, - "SICK-R": 79.0, - "STS12": 72.84, - "STS13": 86.1, - "STS14": 81.15, - "STS15": 88.49, - "STS16": 85.08, - "STS17 (en-en)": 90.22, - "STS22 (en)": 66.14, - "STSBenchmark": 83.56 + "Model": "gbert-large" } ] }, "Summarization": { "spearman": [ { - "Model": 
"text-embedding-3-large", - "SummEval": 29.92 + "Model": "gbert-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "text-embedding-3-large", - "Core17InstructionRetrieval": -0.2, - "News21InstructionRetrieval": -2.03, - "Robust04InstructionRetrieval": -5.81 + "Model": "gbert-large" } ] } }, - "contriever": { + "voyage-2": { "BitextMining": { "f1": [ { - "Model": "contriever" + "Model": "voyage-2" } ] }, "Classification": { "accuracy": [ { - "Model": "contriever" + "Model": "voyage-2", + "AmazonReviewsClassification (fr)": 37.26, + "MTOPDomainClassification (fr)": 79.79, + "MTOPIntentClassification (fr)": 45.62, + "MasakhaNEWSClassification (fra)": 80.19, + "MassiveIntentClassification (fr)": 53.7, + "MassiveScenarioClassification (fr)": 62.46 } ] }, "Clustering": { "v_measure": [ { - "Model": "contriever" + "Model": "voyage-2", + "AlloProfClusteringP2P": 57.96, + "AlloProfClusteringS2S": 41.65, + "HALClusteringS2S": 24.84, + "MLSUMClusteringP2P": 45.08, + "MLSUMClusteringS2S": 38.77, + "MasakhaNEWSClusteringP2P (fra)": 48.54, + "MasakhaNEWSClusteringS2S (fra)": 36.33 } ] }, "PairClassification": { "ap": [ { - "Model": "contriever" + "Model": "voyage-2", + "OpusparcusPC (fr)": 89.76, + "PawsXPairClassification (fr)": 58.96 } ] }, "Reranking": { "map": [ { - "Model": "contriever" + "Model": "voyage-2", + "AlloprofReranking": 63.54, + "SyntecReranking": 82.65 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "contriever", - "ARCChallenge": 8.62, - "AlphaNLI": 31.77, - "HellaSwag": 17.73, - "PIQA": 24.64, - "Quail": 4.97, - "RARbCode": 9.28, - "RARbMath": 30.76, - "SIQA": 1.27, - "SpartQA": 10.94, - "TempReasonL1": 1.93, - "TempReasonL2Fact": 22.68, - "TempReasonL2Pure": 1.12, - "TempReasonL3Fact": 20.62, - "TempReasonL3Pure": 7.8, - "WinoGrande": 47.15 + "Model": "voyage-2", + "AlloprofRetrieval": 45.5, + "BSARDRetrieval": 0.15, + "MintakaRetrieval (fr)": 15.51, + "SyntecRetrieval": 75.83, + "XPQARetrieval (fr)": 67.07 } ] }, "STS": { "spearman": [ { - "Model": "contriever" + "Model": "voyage-2", + "SICKFr": 68.51, + "STS22 (fr)": 70.51, + "STSBenchmarkMultilingualSTS (fr)": 76.43 } ] }, "Summarization": { "spearman": [ { - "Model": "contriever" + "Model": "voyage-2", + "SummEvalFr": 30.88 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "contriever" + "Model": "voyage-2" } ] } }, - "rubert-tiny-turbo": { + "voyage-multilingual-2": { "BitextMining": { "f1": [ { - "Model": "rubert-tiny-turbo", - "Tatoeba (rus-Cyrl_eng-Latn)": 83.14 + "Model": "voyage-multilingual-2" } ] }, "Classification": { "accuracy": [ { - "Model": "rubert-tiny-turbo", - "AmazonPolarityClassification": 68.36, - "Banking77Classification": 59.86, - "EmotionClassification": 29.5, - "GeoreviewClassification (rus-Cyrl)": 41.36, - "HeadlineClassification (rus-Cyrl)": 68.9, - "ImdbClassification": 58.36, - "InappropriatenessClassification (rus-Cyrl)": 59.11, - "KinopoiskClassification (rus-Cyrl)": 50.47, - "MassiveIntentClassification (cmo-Hans)": 5.21, - "MassiveIntentClassification (kor-Kore)": 2.53, - "MassiveIntentClassification (hin-Deva)": 2.56, - "MassiveIntentClassification (kan-Knda)": 2.06, - "MassiveIntentClassification (kat-Geor)": 2.64, - "MassiveIntentClassification (amh-Ethi)": 2.28, - "MassiveIntentClassification (mya-Mymr)": 3.96, - "MassiveIntentClassification (ell-Grek)": 9.66, - "MassiveIntentClassification (lav-Latn)": 22.32, - "MassiveIntentClassification (mal-Mlym)": 2.39, - "MassiveIntentClassification (mon-Cyrl)": 28.99, - "MassiveIntentClassification (urd-Arab)": 2.45, - 
"MassiveIntentClassification (fas-Arab)": 3.34, - "MassiveIntentClassification (ron-Latn)": 31.72, - "MassiveIntentClassification (isl-Latn)": 24.85, - "MassiveIntentClassification (en)": 50.16, - "MassiveIntentClassification (hun-Latn)": 25.52, - "MassiveIntentClassification (fra-Latn)": 31.51, - "MassiveIntentClassification (tha-Thai)": 3.74, - "MassiveIntentClassification (deu-Latn)": 32.1, - "MassiveIntentClassification (tur-Latn)": 27.56, - "MassiveIntentClassification (por-Latn)": 34.35, - "MassiveIntentClassification (sqi-Latn)": 32.38, - "MassiveIntentClassification (cmo-Hant)": 6.81, - "MassiveIntentClassification (hye-Armn)": 2.72, - "MassiveIntentClassification (dan-Latn)": 33.95, - "MassiveIntentClassification (afr-Latn)": 30.4, - "MassiveIntentClassification (ara-Arab)": 3.8, - "MassiveIntentClassification (jav-Latn)": 28.53, - "MassiveIntentClassification (tel-Telu)": 2.21, - "MassiveIntentClassification (tgl-Latn)": 32.02, - "MassiveIntentClassification (swa-Latn)": 27.79, - "MassiveIntentClassification (jpn-Jpan)": 5.61, - "MassiveIntentClassification (msa-Latn)": 28.94, - "MassiveIntentClassification (nob-Latn)": 32.3, - "MassiveIntentClassification (fin-Latn)": 31.13, - "MassiveIntentClassification (ind-Latn)": 33.56, - "MassiveIntentClassification (cym-Latn)": 31.68, - "MassiveIntentClassification (slv-Latn)": 31.39, - "MassiveIntentClassification (spa-Latn)": 31.03, - "MassiveIntentClassification (ben-Beng)": 3.08, - "MassiveIntentClassification (swe-Latn)": 30.23, - "MassiveIntentClassification (rus-Cyrl)": 57.98, - "MassiveIntentClassification (aze-Latn)": 23.58, - "MassiveIntentClassification (ita-Latn)": 35.24, - "MassiveIntentClassification (pol-Latn)": 26.82, - "MassiveIntentClassification (vie-Latn)": 23.72, - "MassiveIntentClassification (tam-Taml)": 1.5, - "MassiveIntentClassification (heb-Hebr)": 2.25, - "MassiveIntentClassification (nld-Latn)": 32.44, - "MassiveIntentClassification (khm-Khmr)": 5.14, - "MassiveScenarioClassification (cmo-Hans)": 10.6, - "MassiveScenarioClassification (kor-Kore)": 5.63, - "MassiveScenarioClassification (hin-Deva)": 7.41, - "MassiveScenarioClassification (kan-Knda)": 7.6, - "MassiveScenarioClassification (kat-Geor)": 7.01, - "MassiveScenarioClassification (amh-Ethi)": 7.68, - "MassiveScenarioClassification (mya-Mymr)": 10.73, - "MassiveScenarioClassification (ell-Grek)": 17.95, - "MassiveScenarioClassification (lav-Latn)": 29.29, - "MassiveScenarioClassification (mal-Mlym)": 6.92, - "MassiveScenarioClassification (mon-Cyrl)": 33.7, - "MassiveScenarioClassification (urd-Arab)": 8.53, - "MassiveScenarioClassification (fas-Arab)": 6.62, - "MassiveScenarioClassification (ron-Latn)": 40.02, - "MassiveScenarioClassification (isl-Latn)": 33.1, - "MassiveScenarioClassification (en)": 61.29, - "MassiveScenarioClassification (hun-Latn)": 36.41, - "MassiveScenarioClassification (fra-Latn)": 42.9, - "MassiveScenarioClassification (tha-Thai)": 8.26, - "MassiveScenarioClassification (deu-Latn)": 42.07, - "MassiveScenarioClassification (tur-Latn)": 34.85, - "MassiveScenarioClassification (por-Latn)": 40.79, - "MassiveScenarioClassification (sqi-Latn)": 42.66, - "MassiveScenarioClassification (cmo-Hant)": 11.93, - "MassiveScenarioClassification (hye-Armn)": 8.78, - "MassiveScenarioClassification (dan-Latn)": 43.69, - "MassiveScenarioClassification (afr-Latn)": 40.84, - "MassiveScenarioClassification (ara-Arab)": 11.86, - "MassiveScenarioClassification (jav-Latn)": 37.23, - "MassiveScenarioClassification (tel-Telu)": 6.91, - 
"MassiveScenarioClassification (tgl-Latn)": 38.16, - "MassiveScenarioClassification (swa-Latn)": 35.66, - "MassiveScenarioClassification (jpn-Jpan)": 10.6, - "MassiveScenarioClassification (msa-Latn)": 38.97, - "MassiveScenarioClassification (nob-Latn)": 39.05, - "MassiveScenarioClassification (fin-Latn)": 35.19, - "MassiveScenarioClassification (ind-Latn)": 39.54, - "MassiveScenarioClassification (cym-Latn)": 39.85, - "MassiveScenarioClassification (slv-Latn)": 35.98, - "MassiveScenarioClassification (spa-Latn)": 37.13, - "MassiveScenarioClassification (ben-Beng)": 8.85, - "MassiveScenarioClassification (swe-Latn)": 36.12, - "MassiveScenarioClassification (rus-Cyrl)": 62.9, - "MassiveScenarioClassification (aze-Latn)": 30.32, - "MassiveScenarioClassification (ita-Latn)": 42.69, - "MassiveScenarioClassification (pol-Latn)": 31.62, - "MassiveScenarioClassification (vie-Latn)": 31.89, - "MassiveScenarioClassification (tam-Taml)": 7.01, - "MassiveScenarioClassification (heb-Hebr)": 7.61, - "MassiveScenarioClassification (nld-Latn)": 40.94, - "MassiveScenarioClassification (khm-Khmr)": 8.51, - "RuReviewsClassification (rus-Cyrl)": 60.66, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.93, - "RuSciBenchOECDClassification (rus-Cyrl)": 40.79, - "ToxicConversationsClassification": 57.77, - "TweetSentimentExtractionClassification": 55.3 + "Model": "voyage-multilingual-2", + "AmazonReviewsClassification (fr)": 43.36, + "MTOPDomainClassification (fr)": 90.33, + "MTOPIntentClassification (fr)": 60.52, + "MasakhaNEWSClassification (fra)": 74.81, + "MassiveIntentClassification (fr)": 68.06, + "MassiveScenarioClassification (fr)": 74.29 } ] }, "Clustering": { "v_measure": [ { - "Model": "rubert-tiny-turbo", - "ArxivClusteringP2P": 24.83, - "ArxivClusteringS2S": 16.68, - "BiorxivClusteringP2P": 20.0, - "BiorxivClusteringS2S": 12.67, - "GeoreviewClusteringP2P (rus-Cyrl)": 59.71, - "MLSUMClusteringP2P (rus-Cyrl)": 40.02, - "MLSUMClusteringS2S (rus-Cyrl)": 41.36, - "MedrxivClusteringP2P": 20.79, - "MedrxivClusteringS2S": 18.18, - "RedditClustering": 26.28, - "RedditClusteringP2P": 40.48, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.55, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.44, - "StackExchangeClustering": 33.51, - "StackExchangeClusteringP2P": 27.98, - "TwentyNewsgroupsClustering": 19.9 + "Model": "voyage-multilingual-2", + "AlloProfClusteringP2P": 65.37, + "AlloProfClusteringS2S": 47.03, + "HALClusteringS2S": 27.67, + "MLSUMClusteringP2P (fr)": 45.99, + "MLSUMClusteringS2S (fr)": 45.57, + "MasakhaNEWSClusteringP2P (fra)": 44.53, + "MasakhaNEWSClusteringS2S (fra)": 49.8 } ] }, "PairClassification": { "ap": [ { - "Model": "rubert-tiny-turbo", - "OpusparcusPC (rus-Cyrl)": 87.58, - "TERRa (rus-Cyrl)": 56.09 + "Model": "voyage-multilingual-2", + "OpusparcusPC (fr)": 93.68, + "PawsXPairClassification (fr)": 63.64 } ] }, "Reranking": { "map": [ { - "Model": "rubert-tiny-turbo", - "RuBQReranking (rus-Cyrl)": 62.15 + "Model": "voyage-multilingual-2", + "AlloprofReranking": 74.78, + "SyntecReranking": 90.4 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "rubert-tiny-turbo", - "AILACasedocs": 7.43, - "AILAStatutes": 13.62, - "ARCChallenge": 3.85, - "AlphaNLI": 14.15, - "ArguAna": 32.03, - "ClimateFEVER": 5.56, - "DBPedia": 9.61, - "RiaNewsRetrieval (rus-Cyrl)": 51.27, - "RuBQRetrieval (rus-Cyrl)": 51.73 + "Model": "voyage-multilingual-2", + "AlloprofRetrieval": 58.27, + "BSARDRetrieval": 5.14, + "LEMBNarrativeQARetrieval": 64.69, + "LEMBNeedleRetrieval": 75.25, + "LEMBPasskeyRetrieval": 97.0, + 
"LEMBQMSumRetrieval": 51.49, + "LEMBSummScreenFDRetrieval": 99.11, + "LEMBWikimQARetrieval": 87.49, + "MintakaRetrieval (fr)": 49.19, + "SyntecRetrieval": 87.28, + "XPQARetrieval (fr)": 72.92 } ] }, "STS": { "spearman": [ { - "Model": "rubert-tiny-turbo", - "RUParaPhraserSTS (rus-Cyrl)": 72.15, - "RuSTSBenchmarkSTS (rus-Cyrl)": 78.48, - "STS22 (cmn-Hans)": 32.83, - "STS22 (deu-Latn_fra-Latn)": 17.5, - "STS22 (pol-Latn_eng-Latn)": 42.08, - "STS22 (rus-Cyrl)": 60.06, - "STS22 (fra-Latn)": 42.0, - "STS22 (deu-Latn)": 8.16, - "STS22 (tur-Latn)": 15.46, - "STS22 (deu-Latn_eng-Latn)": 21.55, - "STS22 (ita-Latn)": 39.69, - "STS22 (pol-Latn)": 9.71, - "STS22 (fra-Latn_pol-Latn)": 39.44, - "STS22 (deu-Latn_pol-Latn)": 25.53, - "STS22 (ara-Arab)": 27.95, - "STS22 (spa-Latn_eng-Latn)": 42.77, - "STS22 (spa-Latn_ita-Latn)": 32.83, - "STS22 (spa-Latn)": 45.31, - "STS22 (cmn-Hans_eng-Latn)": 31.25, - "STS22 (en)": 47.06, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.12 + "Model": "voyage-multilingual-2", + "SICKFr": 74.9, + "STS22 (fr)": 82.76, + "STSBenchmarkMultilingualSTS (fr)": 82.72 } ] }, "Summarization": { "spearman": [ { - "Model": "rubert-tiny-turbo" + "Model": "voyage-multilingual-2", + "SummEvalFr": 29.96 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "rubert-tiny-turbo" + "Model": "voyage-multilingual-2" } ] } }, - "LLM2Vec-Sheared-Llama-unsupervised": { + "universal-sentence-encoder-multilingual-3": { "BitextMining": { "f1": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised" + "Model": "universal-sentence-encoder-multilingual-3" } ] }, "Classification": { "accuracy": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "AmazonCounterfactualClassification (en)": 72.93, - "AmazonPolarityClassification": 74.28, - "AmazonReviewsClassification (en)": 36.14, - "Banking77Classification": 79.0, - "EmotionClassification": 42.85, - "ImdbClassification": 71.92, - "MTOPDomainClassification (en)": 91.24, - "MTOPIntentClassification (en)": 74.08, - "MassiveIntentClassification (en)": 69.99, - "MassiveScenarioClassification (en)": 75.15, - "ToxicConversationsClassification": 68.4, - "TweetSentimentExtractionClassification": 56.08 + "Model": "universal-sentence-encoder-multilingual-3", + "AmazonReviewsClassification (fr)": 33.51, + "MTOPDomainClassification (fr)": 85.5, + "MTOPIntentClassification (fr)": 53.98, + "MasakhaNEWSClassification (fra)": 82.06, + "MassiveIntentClassification (fr)": 61.19, + "MassiveScenarioClassification (fr)": 70.22 } ] }, "Clustering": { "v_measure": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "ArxivClusteringP2P": 42.92, - "ArxivClusteringS2S": 35.2, - "BiorxivClusteringP2P": 35.02, - "BiorxivClusteringS2S": 27.21, - "MedrxivClusteringP2P": 30.15, - "MedrxivClusteringS2S": 26.96, - "RedditClustering": 38.67, - "RedditClusteringP2P": 53.42, - "StackExchangeClustering": 59.35, - "StackExchangeClusteringP2P": 31.47, - "TwentyNewsgroupsClustering": 31.54 + "Model": "universal-sentence-encoder-multilingual-3", + "AlloProfClusteringP2P": 56.9, + "AlloProfClusteringS2S": 37.84, + "HALClusteringS2S": 18.95, + "MLSUMClusteringP2P": 43.9, + "MLSUMClusteringS2S": 35.5, + "MasakhaNEWSClusteringP2P (fra)": 60.57, + "MasakhaNEWSClusteringS2S (fra)": 40.31 } ] }, "PairClassification": { "ap": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "SprintDuplicateQuestions": 77.36, - "TwitterSemEval2015": 61.54, - "TwitterURLCorpus": 77.73 + "Model": "universal-sentence-encoder-multilingual-3", + "OpusparcusPC (fr)": 91.46, + "PawsXPairClassification (fr)": 52.39 } ] 
}, "Reranking": { "map": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "AskUbuntuDupQuestions": 52.7, - "MindSmallReranking": 29.52, - "SciDocsRR": 67.76, - "StackOverflowDupQuestions": 40.82 + "Model": "universal-sentence-encoder-multilingual-3", + "AlloprofReranking": 56.23, + "SyntecReranking": 73.85 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "ArguAna": 43.64, - "CQADupstackRetrieval": 18.5, - "ClimateFEVER": 18.95, - "DBPedia": 13.21, - "FEVER": 16.96, - "FiQA2018": 16.99, - "HotpotQA": 22.64, - "MSMARCO": 7.03, - "NFCorpus": 15.73, - "NQ": 17.96, - "QuoraRetrieval": 78.23, - "SCIDOCS": 5.53, - "SciFact": 38.31, - "TRECCOVID": 56.04, - "Touche2020": 19.17 + "Model": "universal-sentence-encoder-multilingual-3", + "AlloprofRetrieval": 35.27, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 26.12, + "SyntecRetrieval": 69.82, + "XPQARetrieval (fr)": 59.59 } ] }, "STS": { "spearman": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "BIOSSES": 75.12, - "SICK-R": 69.34, - "STS12": 60.09, - "STS13": 72.52, - "STS14": 66.7, - "STS15": 77.69, - "STS16": 75.94, - "STS17 (en-en)": 81.67, - "STS22 (en)": 63.7, - "STSBenchmark": 73.36 + "Model": "universal-sentence-encoder-multilingual-3", + "SICKFr": 71.37, + "STS22 (fr)": 77.91, + "STSBenchmarkMultilingualSTS (fr)": 75.48 } ] }, "Summarization": { "spearman": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised", - "SummEval": 31.23 + "Model": "universal-sentence-encoder-multilingual-3", + "SummEvalFr": 28.21 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "LLM2Vec-Sheared-Llama-unsupervised" + "Model": "universal-sentence-encoder-multilingual-3" } ] } }, - "sentence-t5-xl": { + "rubert-base-cased-sentence": { "BitextMining": { "f1": [ { - "Model": "sentence-t5-xl", - "BUCC (de-en)": 95.04, - "BUCC (fr-en)": 94.96, - "BUCC (ru-en)": 8.33, - "BUCC (zh-en)": 1.3, - "Tatoeba (afr-eng)": 41.84, - "Tatoeba (amh-eng)": 0.03, - "Tatoeba (ang-eng)": 37.87, - "Tatoeba (ara-eng)": 0.61, - "Tatoeba (arq-eng)": 0.74, - "Tatoeba (arz-eng)": 0.42, - "Tatoeba (ast-eng)": 65.41, - "Tatoeba (awa-eng)": 1.46, - "Tatoeba (aze-eng)": 8.79, - "Tatoeba (bel-eng)": 5.76, - "Tatoeba (ben-eng)": 0.01, - "Tatoeba (ber-eng)": 5.92, - "Tatoeba (bos-eng)": 16.12, - "Tatoeba (bre-eng)": 6.12, - "Tatoeba (bul-eng)": 9.06, - "Tatoeba (cat-eng)": 57.4, - "Tatoeba (cbk-eng)": 57.68, - "Tatoeba (ceb-eng)": 12.56, - "Tatoeba (ces-eng)": 9.47, - "Tatoeba (cha-eng)": 27.13, - "Tatoeba (cmn-eng)": 1.82, - "Tatoeba (cor-eng)": 3.87, - "Tatoeba (csb-eng)": 14.41, - "Tatoeba (cym-eng)": 6.69, - "Tatoeba (dan-eng)": 54.87, - "Tatoeba (deu-eng)": 93.72, - "Tatoeba (dsb-eng)": 14.74, - "Tatoeba (dtp-eng)": 5.84, - "Tatoeba (ell-eng)": 0.6, - "Tatoeba (epo-eng)": 30.8, - "Tatoeba (est-eng)": 5.39, - "Tatoeba (eus-eng)": 11.9, - "Tatoeba (fao-eng)": 28.08, - "Tatoeba (fin-eng)": 6.81, - "Tatoeba (fra-eng)": 85.29, - "Tatoeba (fry-eng)": 38.68, - "Tatoeba (gla-eng)": 2.96, - "Tatoeba (gle-eng)": 3.74, - "Tatoeba (glg-eng)": 70.0, - "Tatoeba (gsw-eng)": 30.49, - "Tatoeba (heb-eng)": 0.87, - "Tatoeba (hin-eng)": 0.1, - "Tatoeba (hrv-eng)": 17.43, - "Tatoeba (hsb-eng)": 14.69, - "Tatoeba (hun-eng)": 7.28, - "Tatoeba (hye-eng)": 0.77, - "Tatoeba (ido-eng)": 46.65, - "Tatoeba (ile-eng)": 59.43, - "Tatoeba (ina-eng)": 82.71, - "Tatoeba (ind-eng)": 37.26, - "Tatoeba (isl-eng)": 11.21, - "Tatoeba (ita-eng)": 79.77, - "Tatoeba (jav-eng)": 7.81, - "Tatoeba (jpn-eng)": 0.91, - "Tatoeba (kab-eng)": 2.23, - "Tatoeba (kat-eng)": 1.48, - 
"Tatoeba (kaz-eng)": 1.77, - "Tatoeba (khm-eng)": 0.38, - "Tatoeba (kor-eng)": 1.96, - "Tatoeba (kur-eng)": 12.11, - "Tatoeba (kzj-eng)": 6.13, - "Tatoeba (lat-eng)": 27.84, - "Tatoeba (lfn-eng)": 45.89, - "Tatoeba (lit-eng)": 5.94, - "Tatoeba (lvs-eng)": 8.11, - "Tatoeba (mal-eng)": 0.59, - "Tatoeba (mar-eng)": 0.03, - "Tatoeba (max-eng)": 21.7, - "Tatoeba (mhr-eng)": 0.68, - "Tatoeba (mkd-eng)": 5.92, - "Tatoeba (mon-eng)": 2.39, - "Tatoeba (nds-eng)": 45.04, - "Tatoeba (nld-eng)": 64.75, - "Tatoeba (nno-eng)": 36.74, - "Tatoeba (nob-eng)": 54.77, - "Tatoeba (nov-eng)": 57.12, - "Tatoeba (oci-eng)": 34.39, - "Tatoeba (orv-eng)": 2.04, - "Tatoeba (pam-eng)": 8.34, - "Tatoeba (pes-eng)": 0.87, - "Tatoeba (pms-eng)": 38.06, - "Tatoeba (pol-eng)": 28.35, - "Tatoeba (por-eng)": 83.61, - "Tatoeba (ron-eng)": 65.27, - "Tatoeba (rus-eng)": 30.42, - "Tatoeba (slk-eng)": 13.19, - "Tatoeba (slv-eng)": 13.49, - "Tatoeba (spa-eng)": 89.18, - "Tatoeba (sqi-eng)": 14.66, - "Tatoeba (srp-eng)": 13.24, - "Tatoeba (swe-eng)": 60.67, - "Tatoeba (swg-eng)": 34.76, - "Tatoeba (swh-eng)": 8.07, - "Tatoeba (tam-eng)": 0.36, - "Tatoeba (tat-eng)": 1.46, - "Tatoeba (tel-eng)": 0.67, - "Tatoeba (tgl-eng)": 25.22, - "Tatoeba (tha-eng)": 1.58, - "Tatoeba (tuk-eng)": 4.99, - "Tatoeba (tur-eng)": 7.72, - "Tatoeba (tzl-eng)": 38.49, - "Tatoeba (uig-eng)": 0.87, - "Tatoeba (ukr-eng)": 9.12, - "Tatoeba (urd-eng)": 0.0, - "Tatoeba (uzb-eng)": 5.48, - "Tatoeba (vie-eng)": 8.45, - "Tatoeba (war-eng)": 13.75, - "Tatoeba (wuu-eng)": 1.44, - "Tatoeba (xho-eng)": 9.15, - "Tatoeba (yid-eng)": 0.28, - "Tatoeba (yue-eng)": 0.98, - "Tatoeba (zsm-eng)": 35.71 + "Model": "rubert-base-cased-sentence", + "Tatoeba (rus-Cyrl_eng-Latn)": 20.26 } ] }, "Classification": { "accuracy": [ { - "Model": "sentence-t5-xl", - "AmazonCounterfactualClassification (de)": 67.01, - "AmazonCounterfactualClassification (en)": 76.01, - "AmazonCounterfactualClassification (en-ext)": 77.29, - "AmazonCounterfactualClassification (ja)": 45.61, - "AmazonPolarityClassification": 93.17, - "AmazonReviewsClassification (de)": 44.05, - "AmazonReviewsClassification (en)": 48.18, - "AmazonReviewsClassification (es)": 45.01, - "AmazonReviewsClassification (fr)": 43.52, - "AmazonReviewsClassification (ja)": 22.23, - "AmazonReviewsClassification (zh)": 21.88, - "Banking77Classification": 80.88, - "EmotionClassification": 51.95, - "ImdbClassification": 87.54, - "MTOPDomainClassification (de)": 83.28, - "MTOPDomainClassification (en)": 90.73, - "MTOPDomainClassification (es)": 85.32, - "MTOPDomainClassification (fr)": 85.14, - "MTOPDomainClassification (hi)": 20.85, - "MTOPDomainClassification (th)": 15.62, - "MTOPIntentClassification (de)": 54.65, - "MTOPIntentClassification (en)": 68.15, - "MTOPIntentClassification (es)": 57.38, - "MTOPIntentClassification (fr)": 54.39, - "MTOPIntentClassification (hi)": 3.28, - "MTOPIntentClassification (th)": 5.08, - "MasakhaNEWSClassification (fra)": 80.09, - "MassiveIntentClassification (af)": 40.17, - "MassiveIntentClassification (am)": 2.18, - "MassiveIntentClassification (ar)": 4.18, - "MassiveIntentClassification (az)": 30.02, - "MassiveIntentClassification (bn)": 2.6, - "MassiveIntentClassification (cy)": 29.15, - "MassiveIntentClassification (da)": 47.69, - "MassiveIntentClassification (de)": 57.43, - "MassiveIntentClassification (el)": 9.96, - "MassiveIntentClassification (en)": 72.09, - "MassiveIntentClassification (es)": 57.97, - "MassiveIntentClassification (fa)": 3.6, - "MassiveIntentClassification (fi)": 34.02, - 
"MassiveIntentClassification (fr)": 60.99, - "MassiveIntentClassification (he)": 2.51, - "MassiveIntentClassification (hi)": 3.02, - "MassiveIntentClassification (hu)": 31.66, - "MassiveIntentClassification (hy)": 3.32, - "MassiveIntentClassification (id)": 41.53, - "MassiveIntentClassification (is)": 30.25, - "MassiveIntentClassification (it)": 56.57, - "MassiveIntentClassification (ja)": 3.5, - "MassiveIntentClassification (jv)": 31.67, - "MassiveIntentClassification (ka)": 2.79, - "MassiveIntentClassification (km)": 5.43, - "MassiveIntentClassification (kn)": 2.79, - "MassiveIntentClassification (ko)": 2.67, - "MassiveIntentClassification (lv)": 34.25, - "MassiveIntentClassification (ml)": 2.98, - "MassiveIntentClassification (mn)": 20.99, - "MassiveIntentClassification (ms)": 37.43, - "MassiveIntentClassification (my)": 4.02, - "MassiveIntentClassification (nb)": 45.91, - "MassiveIntentClassification (nl)": 50.51, - "MassiveIntentClassification (pl)": 43.95, - "MassiveIntentClassification (pt)": 57.95, - "MassiveIntentClassification (ro)": 49.37, - "MassiveIntentClassification (ru)": 33.46, - "MassiveIntentClassification (sl)": 36.33, - "MassiveIntentClassification (sq)": 37.65, - "MassiveIntentClassification (sv)": 46.35, - "MassiveIntentClassification (sw)": 30.6, - "MassiveIntentClassification (ta)": 1.79, - "MassiveIntentClassification (te)": 2.26, - "MassiveIntentClassification (th)": 4.02, - "MassiveIntentClassification (tl)": 38.92, - "MassiveIntentClassification (tr)": 32.05, - "MassiveIntentClassification (ur)": 2.7, - "MassiveIntentClassification (vi)": 21.47, - "MassiveIntentClassification (zh-CN)": 0.59, - "MassiveIntentClassification (zh-TW)": 3.24, - "MassiveScenarioClassification (af)": 50.81, - "MassiveScenarioClassification (am)": 6.95, - "MassiveScenarioClassification (ar)": 12.32, - "MassiveScenarioClassification (az)": 38.79, - "MassiveScenarioClassification (bn)": 8.0, - "MassiveScenarioClassification (cy)": 33.91, - "MassiveScenarioClassification (da)": 55.79, - "MassiveScenarioClassification (de)": 65.33, - "MassiveScenarioClassification (el)": 16.89, - "MassiveScenarioClassification (en)": 73.26, - "MassiveScenarioClassification (es)": 62.52, - "MassiveScenarioClassification (fa)": 6.08, - "MassiveScenarioClassification (fi)": 43.34, - "MassiveScenarioClassification (fr)": 66.42, - "MassiveScenarioClassification (he)": 7.55, - "MassiveScenarioClassification (hi)": 7.44, - "MassiveScenarioClassification (hu)": 40.85, - "MassiveScenarioClassification (hy)": 9.25, - "MassiveScenarioClassification (id)": 51.92, - "MassiveScenarioClassification (is)": 40.09, - "MassiveScenarioClassification (it)": 62.94, - "MassiveScenarioClassification (ja)": 7.9, - "MassiveScenarioClassification (jv)": 41.33, - "MassiveScenarioClassification (ka)": 7.76, - "MassiveScenarioClassification (km)": 9.19, - "MassiveScenarioClassification (kn)": 8.36, - "MassiveScenarioClassification (ko)": 6.13, - "MassiveScenarioClassification (lv)": 40.7, - "MassiveScenarioClassification (ml)": 6.98, - "MassiveScenarioClassification (mn)": 27.0, - "MassiveScenarioClassification (ms)": 46.9, - "MassiveScenarioClassification (my)": 9.55, - "MassiveScenarioClassification (nb)": 53.43, - "MassiveScenarioClassification (nl)": 59.65, - "MassiveScenarioClassification (pl)": 49.87, - "MassiveScenarioClassification (pt)": 62.18, - "MassiveScenarioClassification (ro)": 58.22, - "MassiveScenarioClassification (ru)": 40.73, - "MassiveScenarioClassification (sl)": 43.66, - "MassiveScenarioClassification (sq)": 
49.25, - "MassiveScenarioClassification (sv)": 57.17, - "MassiveScenarioClassification (sw)": 40.55, - "MassiveScenarioClassification (ta)": 7.46, - "MassiveScenarioClassification (te)": 7.03, - "MassiveScenarioClassification (th)": 8.52, - "MassiveScenarioClassification (tl)": 51.74, - "MassiveScenarioClassification (tr)": 43.01, - "MassiveScenarioClassification (ur)": 9.61, - "MassiveScenarioClassification (vi)": 28.91, - "MassiveScenarioClassification (zh-CN)": 5.86, - "MassiveScenarioClassification (zh-TW)": 7.14, - "ToxicConversationsClassification": 70.95, - "TweetSentimentExtractionClassification": 61.21 + "Model": "rubert-base-cased-sentence", + "GeoreviewClassification (rus-Cyrl)": 38.05, + "HeadlineClassification (rus-Cyrl)": 67.64, + "InappropriatenessClassification (rus-Cyrl)": 58.27, + "KinopoiskClassification (rus-Cyrl)": 45.86, + "MassiveIntentClassification (rus-Cyrl)": 49.1, + "MassiveScenarioClassification (rus-Cyrl)": 51.91, + "RuReviewsClassification (rus-Cyrl)": 58.34, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.18, + "RuSciBenchOECDClassification (rus-Cyrl)": 40.11 } ] }, "Clustering": { "v_measure": [ { - "Model": "sentence-t5-xl", - "AlloProfClusteringP2P": 60.37, - "AlloProfClusteringS2S": 40.76, - "ArxivClusteringP2P": 41.62, - "ArxivClusteringS2S": 31.17, - "BiorxivClusteringP2P": 36.43, - "BiorxivClusteringS2S": 26.47, - "HALClusteringS2S": 20.28, - "MLSUMClusteringP2P": 41.61, - "MLSUMClusteringS2S": 33.6, - "MasakhaNEWSClusteringP2P (fra)": 62.82, - "MasakhaNEWSClusteringS2S (fra)": 31.74, - "MedrxivClusteringP2P": 32.3, - "MedrxivClusteringS2S": 26.93, - "RedditClustering": 57.03, - "RedditClusteringP2P": 62.34, - "StackExchangeClustering": 67.13, - "StackExchangeClusteringP2P": 34.79, - "TwentyNewsgroupsClustering": 49.53 + "Model": "rubert-base-cased-sentence", + "GeoreviewClusteringP2P (rus-Cyrl)": 41.82, + "MLSUMClusteringP2P (rus-Cyrl)": 43.71, + "MLSUMClusteringS2S (rus-Cyrl)": 45.94, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 46.29, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.28 } ] }, "PairClassification": { "ap": [ { - "Model": "sentence-t5-xl", - "OpusparcusPC (fr)": 92.48, - "PawsXPairClassification (fr)": 62.52, - "SprintDuplicateQuestions": 91.44, - "TwitterSemEval2015": 80.89, - "TwitterURLCorpus": 85.86 + "Model": "rubert-base-cased-sentence", + "OpusparcusPC (rus-Cyrl)": 81.52, + "TERRa (rus-Cyrl)": 59.12 } ] }, "Reranking": { "map": [ { - "Model": "sentence-t5-xl", - "AlloprofReranking": 63.3, - "AskUbuntuDupQuestions": 62.86, - "MindSmallReranking": 29.77, - "SciDocsRR": 75.16, - "StackOverflowDupQuestions": 51.05, - "SyntecReranking": 83.07 + "Model": "rubert-base-cased-sentence", + "RuBQReranking (rus-Cyrl)": 39.89 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "sentence-t5-xl", - "AlloprofRetrieval": 40.38, - "ArguAna": 39.4, - "BSARDRetrieval": 0.14, - "CQADupstackRetrieval": 40.78, - "ClimateFEVER": 10.61, - "DBPedia": 33.65, - "FEVER": 36.12, - "FiQA2018": 44.71, - "HotpotQA": 37.17, - "MSMARCO": 25.17, - "MintakaRetrieval (fr)": 31.54, - "NFCorpus": 33.18, - "NQ": 46.29, - "QuoraRetrieval": 85.85, - "SCIDOCS": 15.97, - "SciFact": 50.91, - "SyntecRetrieval": 74.24, - "TRECCOVID": 54.77, - "Touche2020": 22.51, - "XPQARetrieval (fr)": 52.14 + "Model": "rubert-base-cased-sentence", + "RiaNewsRetrieval (rus-Cyrl)": 6.72, + "RuBQRetrieval (rus-Cyrl)": 12.63 } ] }, "STS": { "spearman": [ { - "Model": "sentence-t5-xl", - "BIOSSES": 73.12, - "SICK-R": 79.98, - "SICKFr": 75.08, - "STS12": 79.02, - "STS13": 88.8, - "STS14": 84.33, 
- "STS15": 88.89, - "STS16": 85.31, - "STS17 (ar-ar)": 11.13, - "STS17 (en-ar)": -3.93, - "STS17 (en-de)": 79.04, - "STS17 (en-en)": 88.91, - "STS17 (en-tr)": 13.61, - "STS17 (es-en)": 71.72, - "STS17 (es-es)": 83.42, - "STS17 (fr-en)": 71.38, - "STS17 (it-en)": 69.5, - "STS17 (ko-ko)": 9.61, - "STS17 (nl-en)": 66.12, - "STS22 (ar)": 29.6, - "STS22 (de)": 47.72, - "STS22 (de-en)": 49.64, - "STS22 (de-fr)": 62.21, - "STS22 (de-pl)": 34.34, - "STS22 (en)": 64.32, - "STS22 (es)": 58.16, - "STS22 (es-en)": 69.15, - "STS22 (es-it)": 65.26, - "STS22 (fr)": 77.49, - "STS22 (fr-pl)": 50.71, - "STS22 (it)": 66.91, - "STS22 (pl)": 27.04, - "STS22 (pl-en)": 58.85, - "STS22 (ru)": 26.63, - "STS22 (tr)": 43.36, - "STS22 (zh)": 33.55, - "STS22 (zh-en)": 29.0, - "STSBenchmark": 83.93, - "STSBenchmarkMultilingualSTS (fr)": 79.42 + "Model": "rubert-base-cased-sentence", + "RUParaPhraserSTS (rus-Cyrl)": 66.24, + "RuSTSBenchmarkSTS (rus-Cyrl)": 66.03, + "STS22 (rus-Cyrl)": 51.27, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 66.71 } ] }, "Summarization": { "spearman": [ { - "Model": "sentence-t5-xl", - "SummEval": 29.91, - "SummEvalFr": 31.59 + "Model": "rubert-base-cased-sentence" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "sentence-t5-xl" + "Model": "rubert-base-cased-sentence" } ] } }, - "multilingual-e5-base": { + "gelectra-base": { "BitextMining": { "f1": [ { - "Model": "multilingual-e5-base", - "BornholmBitextMining (dan-Latn)": 33.22, - "BornholmBitextMining": 46.4, - "Tatoeba (kzj-Latn_eng-Latn)": 6.26, - "Tatoeba (ina-Latn_eng-Latn)": 86.11, - "Tatoeba (bre-Latn_eng-Latn)": 5.44, - "Tatoeba (kab-Latn_eng-Latn)": 21.77, - "Tatoeba (ind-Latn_eng-Latn)": 90.26, - "Tatoeba (mkd-Cyrl_eng-Latn)": 73.76, - "Tatoeba (yue-Hant_eng-Latn)": 80.66, - "Tatoeba (amh-Ethi_eng-Latn)": 74.93, - "Tatoeba (ceb-Latn_eng-Latn)": 45.46, - "Tatoeba (lit-Latn_eng-Latn)": 75.53, - "Tatoeba (nds-Latn_eng-Latn)": 53.86, - "Tatoeba (kur-Latn_eng-Latn)": 52.96, - "Tatoeba (bel-Cyrl_eng-Latn)": 86.7, - "Tatoeba (ile-Latn_eng-Latn)": 72.56, - "Tatoeba (oci-Latn_eng-Latn)": 35.79, - "Tatoeba (heb-Hebr_eng-Latn)": 74.26, - "Tatoeba (mhr-Cyrl_eng-Latn)": 5.52, - "Tatoeba (afr-Latn_eng-Latn)": 87.04, - "Tatoeba (uig-Arab_eng-Latn)": 62.97, - "Tatoeba (mar-Deva_eng-Latn)": 86.62, - "Tatoeba (fry-Latn_eng-Latn)": 50.82, - "Tatoeba (tat-Cyrl_eng-Latn)": 66.92, - "Tatoeba (khm-Khmr_eng-Latn)": 47.27, - "Tatoeba (dtp-Latn_eng-Latn)": 5.13, - "Tatoeba (ben-Beng_eng-Latn)": 81.05, - "Tatoeba (ido-Latn_eng-Latn)": 74.41, - "Tatoeba (cha-Latn_eng-Latn)": 16.95, - "Tatoeba (zsm-Latn_eng-Latn)": 92.45, - "Tatoeba (pes-Arab_eng-Latn)": 87.18, - "Tatoeba (hye-Armn_eng-Latn)": 85.85, - "Tatoeba (cat-Latn_eng-Latn)": 84.09, - "Tatoeba (cym-Latn_eng-Latn)": 65.69, - "Tatoeba (aze-Latn_eng-Latn)": 84.71, - "Tatoeba (yid-Hebr_eng-Latn)": 63.2, - "Tatoeba (swg-Latn_eng-Latn)": 42.33, - "Tatoeba (war-Latn_eng-Latn)": 47.18, - "Tatoeba (swe-Latn_eng-Latn)": 91.33, - "Tatoeba (slk-Latn_eng-Latn)": 86.42, - "Tatoeba (gla-Latn_eng-Latn)": 43.08, - "Tatoeba (xho-Latn_eng-Latn)": 73.24, - "Tatoeba (dan-Latn_eng-Latn)": 91.23, - "Tatoeba (ara-Arab_eng-Latn)": 82.86, - "Tatoeba (ast-Latn_eng-Latn)": 74.36, - "Tatoeba (hrv-Latn_eng-Latn)": 92.5, - "Tatoeba (nob-Latn_eng-Latn)": 95.9, - "Tatoeba (eus-Latn_eng-Latn)": 56.26, - "Tatoeba (kaz-Cyrl_eng-Latn)": 75.56, - "Tatoeba (tuk-Latn_eng-Latn)": 19.67, - "Tatoeba (pam-Latn_eng-Latn)": 6.92, - "Tatoeba (gsw-Latn_eng-Latn)": 43.53, - "Tatoeba (slv-Latn_eng-Latn)": 81.93, - "Tatoeba (dsb-Latn_eng-Latn)": 
34.36, - "Tatoeba (cor-Latn_eng-Latn)": 4.38, - "Tatoeba (ces-Latn_eng-Latn)": 88.75, - "Tatoeba (tam-Taml_eng-Latn)": 85.12, - "Tatoeba (glg-Latn_eng-Latn)": 82.69, - "Tatoeba (bul-Cyrl_eng-Latn)": 88.95, - "Tatoeba (deu-Latn_eng-Latn)": 97.07, - "Tatoeba (fin-Latn_eng-Latn)": 86.15, - "Tatoeba (csb-Latn_eng-Latn)": 24.29, - "Tatoeba (urd-Arab_eng-Latn)": 86.2, - "Tatoeba (est-Latn_eng-Latn)": 70.64, - "Tatoeba (wuu-Hans_eng-Latn)": 78.65, - "Tatoeba (tha-Thai_eng-Latn)": 94.22, - "Tatoeba (spa-Latn_eng-Latn)": 96.97, - "Tatoeba (ukr-Cyrl_eng-Latn)": 88.29, - "Tatoeba (awa-Deva_eng-Latn)": 68.39, - "Tatoeba (mal-Mlym_eng-Latn)": 96.72, - "Tatoeba (cbk-Latn_eng-Latn)": 60.66, - "Tatoeba (hsb-Latn_eng-Latn)": 40.36, - "Tatoeba (tzl-Latn_eng-Latn)": 34.44, - "Tatoeba (gle-Latn_eng-Latn)": 58.62, - "Tatoeba (orv-Cyrl_eng-Latn)": 16.0, - "Tatoeba (isl-Latn_eng-Latn)": 76.9, - "Tatoeba (jav-Latn_eng-Latn)": 61.25, - "Tatoeba (fao-Latn_eng-Latn)": 64.72, - "Tatoeba (pol-Latn_eng-Latn)": 94.57, - "Tatoeba (max-Deva_eng-Latn)": 52.4, - "Tatoeba (bos-Latn_eng-Latn)": 88.86, - "Tatoeba (hun-Latn_eng-Latn)": 84.41, - "Tatoeba (rus-Cyrl_eng-Latn)": 91.78, - "Tatoeba (arq-Arab_eng-Latn)": 26.61, - "Tatoeba (kor-Hang_eng-Latn)": 83.37, - "Tatoeba (uzb-Latn_eng-Latn)": 62.63, - "Tatoeba (pms-Latn_eng-Latn)": 44.61, - "Tatoeba (ell-Grek_eng-Latn)": 89.96, - "Tatoeba (swh-Latn_eng-Latn)": 66.81, - "Tatoeba (epo-Latn_eng-Latn)": 92.07, - "Tatoeba (jpn-Jpan_eng-Latn)": 90.3, - "Tatoeba (tel-Telu_eng-Latn)": 88.49, - "Tatoeba (srp-Cyrl_eng-Latn)": 89.08, - "Tatoeba (nov-Latn_eng-Latn)": 66.96, - "Tatoeba (cmn-Hans_eng-Latn)": 93.35, - "Tatoeba (tgl-Latn_eng-Latn)": 83.78, - "Tatoeba (ber-Tfng_eng-Latn)": 23.59, - "Tatoeba (sqi-Latn_eng-Latn)": 90.06, - "Tatoeba (ang-Latn_eng-Latn)": 29.87, - "Tatoeba (ita-Latn_eng-Latn)": 90.61, - "Tatoeba (por-Latn_eng-Latn)": 92.74, - "Tatoeba (mon-Cyrl_eng-Latn)": 78.37, - "Tatoeba (fra-Latn_eng-Latn)": 92.76, - "Tatoeba (lat-Latn_eng-Latn)": 39.62, - "Tatoeba (nno-Latn_eng-Latn)": 82.67, - "Tatoeba (arz-Arab_eng-Latn)": 66.79, - "Tatoeba (hin-Deva_eng-Latn)": 93.13, - "Tatoeba (nld-Latn_eng-Latn)": 93.2, - "Tatoeba (kat-Geor_eng-Latn)": 77.83, - "Tatoeba (lfn-Latn_eng-Latn)": 52.85, - "Tatoeba (lvs-Latn_eng-Latn)": 76.76, - "Tatoeba (tur-Latn_eng-Latn)": 92.54, - "Tatoeba (ron-Latn_eng-Latn)": 91.27, - "Tatoeba (vie-Latn_eng-Latn)": 94.55 + "Model": "gelectra-base" } ] }, "Classification": { "accuracy": [ { - "Model": "multilingual-e5-base", - "AllegroReviews (pol-Latn)": 40.78, - "AllegroReviews": 40.85, - "AmazonCounterfactualClassification (en-ext)": 76.91, - "AmazonCounterfactualClassification (en)": 77.36, - "AmazonCounterfactualClassification (deu-Latn)": 70.81, - "AmazonCounterfactualClassification (jpn-Jpan)": 72.02, - "AmazonPolarityClassification": 91.76, - "AmazonReviewsClassification (en)": 47.54, - "AmazonReviewsClassification (deu-Latn)": 44.37, - "AmazonReviewsClassification (spa-Latn)": 43.38, - "AmazonReviewsClassification (fra-Latn)": 41.55, - "AmazonReviewsClassification (jpn-Jpan)": 39.57, - "AmazonReviewsClassification (cmn-Hans)": 38.34, - "AmazonReviewsClassification (fr)": 40.94, - "AngryTweetsClassification (dan-Latn)": 56.28, - "AngryTweetsClassification": 54.65, - "Banking77Classification": 73.53, - "CBD (pol-Latn)": 62.6, - "CBD": 62.66, - "DKHateClassification": 63.53, - "DanishPoliticalCommentsClassification (dan-Latn)": 36.41, - "DanishPoliticalCommentsClassification": 36.69, - "EmotionClassification": 45.68, - "GeoreviewClassification 
(rus-Cyrl)": 46.05, - "HeadlineClassification (rus-Cyrl)": 75.64, - "IFlyTek (cmn-Hans)": 40.81, - "IFlyTek": 44.93, - "ImdbClassification": 84.29, - "InappropriatenessClassification (rus-Cyrl)": 58.78, - "JDReview (cmn-Hans)": 75.72, - "JDReview": 76.21, - "KinopoiskClassification (rus-Cyrl)": 50.89, - "LccSentimentClassification (dan-Latn)": 60.13, - "LccSentimentClassification": 59.67, - "MTOPDomainClassification (en)": 90.9, - "MTOPDomainClassification (deu-Latn)": 87.94, - "MTOPDomainClassification (spa-Latn)": 85.96, - "MTOPDomainClassification (fra-Latn)": 82.88, - "MTOPDomainClassification (hin-Deva)": 83.92, - "MTOPDomainClassification (tha-Thai)": 83.94, - "MTOPDomainClassification (fr)": 84.79, - "MTOPIntentClassification (en)": 61.6, - "MTOPIntentClassification (deu-Latn)": 61.05, - "MTOPIntentClassification (spa-Latn)": 55.36, - "MTOPIntentClassification (fra-Latn)": 52.23, - "MTOPIntentClassification (hin-Deva)": 53.93, - "MTOPIntentClassification (tha-Thai)": 58.69, - "MTOPIntentClassification (fr)": 55.51, - "MasakhaNEWSClassification (amh-Ethi)": 83.8, - "MasakhaNEWSClassification (eng)": 76.49, - "MasakhaNEWSClassification (fra-Latn)": 76.35, - "MasakhaNEWSClassification (hau-Latn)": 74.63, - "MasakhaNEWSClassification (ibo-Latn)": 64.59, - "MasakhaNEWSClassification (lin-Latn)": 70.57, - "MasakhaNEWSClassification (lug-Latn)": 68.12, - "MasakhaNEWSClassification (orm-Ethi)": 71.75, - "MasakhaNEWSClassification (pcm-Latn)": 91.05, - "MasakhaNEWSClassification (run-Latn)": 73.35, - "MasakhaNEWSClassification (sna-Latn)": 84.17, - "MasakhaNEWSClassification (som-Latn)": 60.1, - "MasakhaNEWSClassification (swa-Latn)": 70.74, - "MasakhaNEWSClassification (tir-Ethi)": 67.1, - "MasakhaNEWSClassification (xho-Latn)": 76.03, - "MasakhaNEWSClassification (yor-Latn)": 72.75, - "MasakhaNEWSClassification (fra)": 79.69, - "MassiveIntentClassification (tha-Thai)": 59.63, - "MassiveIntentClassification (tam-Taml)": 48.93, - "MassiveIntentClassification (fin-Latn)": 58.91, - "MassiveIntentClassification (rus-Cyrl)": 62.78, - "MassiveIntentClassification (afr-Latn)": 49.82, - "MassiveIntentClassification (heb-Hebr)": 55.3, - "MassiveIntentClassification (sqi-Latn)": 51.07, - "MassiveIntentClassification (por-Latn)": 62.12, - "MassiveIntentClassification (hye-Armn)": 48.77, - "MassiveIntentClassification (cym-Latn)": 37.05, - "MassiveIntentClassification (deu-Latn)": 59.82, - "MassiveIntentClassification (fas-Arab)": 59.51, - "MassiveIntentClassification (hun-Latn)": 57.69, - "MassiveIntentClassification (urd-Arab)": 51.3, - "MassiveIntentClassification (cmo-Hant)": 56.4, - "MassiveIntentClassification (khm-Khmr)": 32.14, - "MassiveIntentClassification (tel-Telu)": 50.09, - "MassiveIntentClassification (vie-Latn)": 59.61, - "MassiveIntentClassification (kan-Knda)": 48.63, - "MassiveIntentClassification (ara-Arab)": 50.2, - "MassiveIntentClassification (mya-Mymr)": 46.67, - "MassiveIntentClassification (slv-Latn)": 53.84, - "MassiveIntentClassification (jpn-Jpan)": 62.3, - "MassiveIntentClassification (mon-Cyrl)": 46.8, - "MassiveIntentClassification (jav-Latn)": 43.23, - "MassiveIntentClassification (lav-Latn)": 51.17, - "MassiveIntentClassification (ron-Latn)": 56.83, - "MassiveIntentClassification (dan-Latn)": 60.69, - "MassiveIntentClassification (nob-Latn)": 60.06, - "MassiveIntentClassification (tgl-Latn)": 48.99, - "MassiveIntentClassification (aze-Latn)": 51.36, - "MassiveIntentClassification (ind-Latn)": 58.7, - "MassiveIntentClassification (amh-Ethi)": 42.4, - 
"MassiveIntentClassification (ben-Beng)": 51.69, - "MassiveIntentClassification (ell-Grek)": 58.07, - "MassiveIntentClassification (hin-Deva)": 56.75, - "MassiveIntentClassification (nld-Latn)": 61.23, - "MassiveIntentClassification (pol-Latn)": 60.98, - "MassiveIntentClassification (swe-Latn)": 62.43, - "MassiveIntentClassification (isl-Latn)": 44.52, - "MassiveIntentClassification (mal-Mlym)": 53.75, - "MassiveIntentClassification (msa-Latn)": 52.84, - "MassiveIntentClassification (kat-Geor)": 37.56, - "MassiveIntentClassification (tur-Latn)": 60.69, - "MassiveIntentClassification (kor-Kore)": 59.97, - "MassiveIntentClassification (ita-Latn)": 61.29, - "MassiveIntentClassification (cmo-Hans)": 63.22, - "MassiveIntentClassification (en)": 65.71, - "MassiveIntentClassification (fra-Latn)": 61.32, - "MassiveIntentClassification (swa-Latn)": 45.24, - "MassiveIntentClassification (spa-Latn)": 61.13, - "MassiveIntentClassification (da)": 60.16, - "MassiveIntentClassification (nb)": 59.83, - "MassiveIntentClassification (sv)": 61.78, - "MassiveIntentClassification (pl)": 61.04, - "MassiveScenarioClassification (ind-Latn)": 63.6, - "MassiveScenarioClassification (tha-Thai)": 67.37, - "MassiveScenarioClassification (cmo-Hans)": 70.24, - "MassiveScenarioClassification (ben-Beng)": 57.0, - "MassiveScenarioClassification (kan-Knda)": 53.49, - "MassiveScenarioClassification (tel-Telu)": 54.24, - "MassiveScenarioClassification (aze-Latn)": 55.15, - "MassiveScenarioClassification (ell-Grek)": 65.38, - "MassiveScenarioClassification (swa-Latn)": 52.64, - "MassiveScenarioClassification (hin-Deva)": 62.91, - "MassiveScenarioClassification (tur-Latn)": 65.18, - "MassiveScenarioClassification (dan-Latn)": 67.97, - "MassiveScenarioClassification (msa-Latn)": 58.35, - "MassiveScenarioClassification (mya-Mymr)": 50.77, - "MassiveScenarioClassification (mon-Cyrl)": 51.87, - "MassiveScenarioClassification (tgl-Latn)": 54.36, - "MassiveScenarioClassification (cmo-Hant)": 63.73, - "MassiveScenarioClassification (ara-Arab)": 58.0, - "MassiveScenarioClassification (slv-Latn)": 58.3, - "MassiveScenarioClassification (spa-Latn)": 66.47, - "MassiveScenarioClassification (urd-Arab)": 56.74, - "MassiveScenarioClassification (fin-Latn)": 64.94, - "MassiveScenarioClassification (tam-Taml)": 53.86, - "MassiveScenarioClassification (ron-Latn)": 63.5, - "MassiveScenarioClassification (hye-Armn)": 53.63, - "MassiveScenarioClassification (vie-Latn)": 66.35, - "MassiveScenarioClassification (deu-Latn)": 68.4, - "MassiveScenarioClassification (afr-Latn)": 58.95, - "MassiveScenarioClassification (en)": 71.57, - "MassiveScenarioClassification (fra-Latn)": 67.37, - "MassiveScenarioClassification (jpn-Jpan)": 69.89, - "MassiveScenarioClassification (nld-Latn)": 68.62, - "MassiveScenarioClassification (cym-Latn)": 43.84, - "MassiveScenarioClassification (heb-Hebr)": 62.53, - "MassiveScenarioClassification (pol-Latn)": 66.12, - "MassiveScenarioClassification (fas-Arab)": 63.92, - "MassiveScenarioClassification (lav-Latn)": 56.42, - "MassiveScenarioClassification (por-Latn)": 65.49, - "MassiveScenarioClassification (rus-Cyrl)": 68.21, - "MassiveScenarioClassification (mal-Mlym)": 59.89, - "MassiveScenarioClassification (hun-Latn)": 65.75, - "MassiveScenarioClassification (nob-Latn)": 66.57, - "MassiveScenarioClassification (kor-Kore)": 67.9, - "MassiveScenarioClassification (isl-Latn)": 53.28, - "MassiveScenarioClassification (khm-Khmr)": 38.45, - "MassiveScenarioClassification (sqi-Latn)": 57.92, - "MassiveScenarioClassification 
(jav-Latn)": 51.94, - "MassiveScenarioClassification (amh-Ethi)": 50.33, - "MassiveScenarioClassification (ita-Latn)": 66.17, - "MassiveScenarioClassification (kat-Geor)": 43.38, - "MassiveScenarioClassification (swe-Latn)": 69.35, - "MassiveScenarioClassification (da)": 67.46, - "MassiveScenarioClassification (nb)": 66.18, - "MassiveScenarioClassification (sv)": 69.15, - "MassiveScenarioClassification (pl)": 66.11, - "MultilingualSentiment (cmn-Hans)": 67.56, - "MultilingualSentiment": 65.28, - "NoRecClassification (nob-Latn)": 53.74, - "NoRecClassification": 57.58, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 75.85, - "NordicLangClassification": 75.94, - "NorwegianParliament": 59.94, - "OnlineShopping (cmn-Hans)": 88.66, - "OnlineShopping": 88.4, - "PAC (pol-Latn)": 70.87, - "PAC": 70.87, - "PolEmo2.0-IN (pol-Latn)": 67.59, - "PolEmo2.0-IN": 67.66, - "PolEmo2.0-OUT (pol-Latn)": 43.93, - "PolEmo2.0-OUT": 43.91, - "RuReviewsClassification (rus-Cyrl)": 62.99, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.28, - "RuSciBenchOECDClassification (rus-Cyrl)": 42.69, - "ScalaDaClassification": 50.79, - "ScalaNbClassification": 50.32, - "TNews (cmn-Hans)": 47.52, - "TNews": 47.06, - "ToxicConversationsClassification": 64.33, - "TweetSentimentExtractionClassification": 62.8, - "Waimai (cmn-Hans)": 85.98, - "Waimai": 84.42 + "Model": "gelectra-base" } ] }, "Clustering": { "v_measure": [ { - "Model": "multilingual-e5-base", - "8TagsClustering": 24.97, - "AlloProfClusteringP2P": 62.09, - "AlloProfClusteringS2S": 32.98, - "ArxivClusteringP2P": 43.35, - "ArxivClusteringS2S": 36.0, - "BiorxivClusteringP2P": 37.55, - "BiorxivClusteringS2S": 30.33, - "CLSClusteringP2P": 32.41, - "CLSClusteringS2S": 36.99, - "GeoreviewClusteringP2P (rus-Cyrl)": 54.46, - "HALClusteringS2S": 22.48, - "MLSUMClusteringP2P (rus-Cyrl)": 43.47, - "MLSUMClusteringP2P": 43.48, - "MLSUMClusteringS2S (rus-Cyrl)": 40.87, - "MLSUMClusteringS2S": 38.53, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 58.05, - "MasakhaNEWSClusteringP2P (eng)": 43.8, - "MasakhaNEWSClusteringP2P (fra-Latn)": 58.28, - "MasakhaNEWSClusteringP2P (hau-Latn)": 44.78, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.97, - "MasakhaNEWSClusteringP2P (lin-Latn)": 48.08, - "MasakhaNEWSClusteringP2P (lug-Latn)": 50.15, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 38.02, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 71.03, - "MasakhaNEWSClusteringP2P (run-Latn)": 58.28, - "MasakhaNEWSClusteringP2P (sna-Latn)": 59.25, - "MasakhaNEWSClusteringP2P (som-Latn)": 37.27, - "MasakhaNEWSClusteringP2P (swa-Latn)": 34.54, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 53.44, - "MasakhaNEWSClusteringP2P (xho-Latn)": 40.32, - "MasakhaNEWSClusteringP2P (yor-Latn)": 37.97, - "MasakhaNEWSClusteringP2P (fra)": 47.91, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 49.38, - "MasakhaNEWSClusteringS2S (eng)": 45.76, - "MasakhaNEWSClusteringS2S (fra-Latn)": 55.43, - "MasakhaNEWSClusteringS2S (hau-Latn)": 16.11, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 24.38, - "MasakhaNEWSClusteringS2S (lin-Latn)": 44.8, - "MasakhaNEWSClusteringS2S (lug-Latn)": 45.67, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.41, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 83.26, - "MasakhaNEWSClusteringS2S (run-Latn)": 48.77, - "MasakhaNEWSClusteringS2S (sna-Latn)": 43.9, - "MasakhaNEWSClusteringS2S (som-Latn)": 25.43, - "MasakhaNEWSClusteringS2S (swa-Latn)": 9.87, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.66, - "MasakhaNEWSClusteringS2S (xho-Latn)": 29.65, - "MasakhaNEWSClusteringS2S (yor-Latn)": 30.12, - 
"MasakhaNEWSClusteringS2S (fra)": 51.16, - "MedrxivClusteringP2P": 30.6, - "MedrxivClusteringS2S": 28.73, - "RedditClustering": 43.15, - "RedditClusteringP2P": 61.69, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.56, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.78, - "StackExchangeClustering": 55.31, - "StackExchangeClusteringP2P": 33.51, - "ThuNewsClusteringP2P": 40.98, - "ThuNewsClusteringS2S": 52.36, - "TwentyNewsgroupsClustering": 35.55 + "Model": "gelectra-base", + "BlurbsClusteringP2P": 10.06, + "BlurbsClusteringS2S": 7.74, + "TenKGnadClusteringP2P": 9.02, + "TenKGnadClusteringS2S": 4.11 } ] }, "PairClassification": { "ap": [ { - "Model": "multilingual-e5-base", - "CDSC-E (pol-Latn)": 72.7, - "CDSC-E": 72.67, - "Cmnli": 74.51, - "Ocnli": 59.63, - "OpusparcusPC (deu-Latn)": 95.83, - "OpusparcusPC (en)": 98.71, - "OpusparcusPC (fin-Latn)": 90.3, - "OpusparcusPC (fra-Latn)": 92.12, - "OpusparcusPC (rus-Cyrl)": 86.82, - "OpusparcusPC (swe-Latn)": 93.05, - "OpusparcusPC (fr)": 92.72, - "PPC": 88.01, - "PSC (pol-Latn)": 99.14, - "PSC": 99.14, - "PawsXPairClassification (deu-Latn)": 54.11, - "PawsXPairClassification (en)": 55.79, - "PawsXPairClassification (spa-Latn)": 54.13, - "PawsXPairClassification (fra-Latn)": 56.01, - "PawsXPairClassification (jpn-Hira)": 49.02, - "PawsXPairClassification (kor-Hang)": 51.01, - "PawsXPairClassification (cmn-Hans)": 55.13, - "PawsXPairClassification (fr)": 56.93, - "SICK-E-PL (pol-Latn)": 68.76, - "SICK-E-PL": 68.77, - "SprintDuplicateQuestions": 93.02, - "TERRa (rus-Cyrl)": 54.96, - "TwitterSemEval2015": 72.21, - "TwitterURLCorpus": 85.48 + "Model": "gelectra-base" } ] }, "Reranking": { "map": [ { - "Model": "multilingual-e5-base", - "AlloprofReranking (fra-Latn)": 65.9, - "AlloprofReranking": 58.1, - "AskUbuntuDupQuestions": 59.28, - "CMedQAv1": 65.21, - "CMedQAv2": 66.06, - "MMarcoReranking (cmn-Hans)": 30.52, - "MMarcoReranking": 21.76, - "MindSmallReranking": 29.28, - "RuBQReranking (rus-Cyrl)": 72.01, - "SciDocsRR": 81.81, - "StackOverflowDupQuestions": 49.75, - "SyntecReranking (fra-Latn)": 85.31, - "SyntecReranking": 85.43, - "T2Reranking (cmn-Hans)": 64.86, - "T2Reranking": 64.39 + "Model": "gelectra-base" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "multilingual-e5-base", - "AILACasedocs": 26.05, - "AILAStatutes": 20.37, - "ARCChallenge": 9.61, - "AlloprofRetrieval (fra-Latn)": 34.45, - "AlloprofRetrieval": 36.21, - "AlphaNLI": 16.44, - "ArguAna": 44.21, - "ArguAna-PL (pol-Latn)": 42.86, - "ArguAna-PL": 42.81, - "BSARDRetrieval (fra-Latn)": 18.83, - "BSARDRetrieval": 0.0, - "CmedqaRetrieval (cmn-Hans)": 27.2, - "CmedqaRetrieval": 27.2, - "CovidRetrieval (cmn-Hans)": 73.48, - "CovidRetrieval": 73.45, - "DBPedia-PL": 30.23, - "DuRetrieval (cmn-Hans)": 81.66, - "DuRetrieval": 81.64, - "EcomRetrieval (cmn-Hans)": 54.01, - "EcomRetrieval": 54.17, - "FiQA-PL (pol-Latn)": 25.59, - "FiQA-PL": 25.52, - "FiQA2018": 38.15, - "GerDaLIRSmall (deu-Latn)": 15.3, - "HellaSwag": 24.79, - "HotpotQA-PL": 63.52, - "LEMBNarrativeQARetrieval": 23.6, - "LEMBNeedleRetrieval": 32.0, - "LEMBPasskeyRetrieval": 38.25, - "LEMBQMSumRetrieval": 25.16, - "LEMBSummScreenFDRetrieval": 68.21, - "LEMBWikimQARetrieval": 56.04, - "LeCaRDv2 (zho-Hans)": 59.0, - "LegalBenchConsumerContractsQA": 69.02, - "LegalBenchCorporateLobbying": 88.97, - "LegalQuAD (deu-Latn)": 47.85, - "LegalSummarization": 61.69, - "MMarcoRetrieval (cmn-Hans)": 76.01, - "MMarcoRetrieval": 76.04, - "MSMARCO-PL": 29.52, - "MedicalRetrieval (cmn-Hans)": 48.33, - "MedicalRetrieval": 48.35, - 
"MintakaRetrieval (ara-Arab)": 23.06, - "MintakaRetrieval (deu-Latn)": 29.8, - "MintakaRetrieval (spa-Latn)": 29.88, - "MintakaRetrieval (fra-Latn)": 30.96, - "MintakaRetrieval (hin-Deva)": 22.68, - "MintakaRetrieval (ita-Latn)": 29.77, - "MintakaRetrieval (jpn-Hira)": 22.98, - "MintakaRetrieval (por-Latn)": 30.62, - "MintakaRetrieval (fr)": 23.46, - "NFCorpus": 32.49, - "NFCorpus-PL (pol-Latn)": 25.99, - "NFCorpus-PL": 25.98, - "NQ-PL": 44.8, - "PIQA": 25.09, - "Quail": 3.52, - "Quora-PL": 81.22, - "RARbCode": 52.16, - "RARbMath": 65.35, - "RiaNewsRetrieval (rus-Cyrl)": 70.24, - "RuBQRetrieval (rus-Cyrl)": 69.58, - "SCIDOCS": 17.17, - "SCIDOCS-PL (pol-Latn)": 12.36, - "SCIDOCS-PL": 12.35, - "SIQA": 3.72, - "SciFact": 69.39, - "SciFact-PL (pol-Latn)": 62.26, - "SciFact-PL": 62.11, - "SpartQA": 7.91, - "SyntecRetrieval (fra-Latn)": 82.86, - "SyntecRetrieval": 80.49, - "T2Retrieval (cmn-Hans)": 70.77, - "T2Retrieval": 70.86, - "TRECCOVID": 69.5, - "TRECCOVID-PL (pol-Latn)": 65.94, - "TRECCOVID-PL": 66.06, - "TempReasonL1": 0.72, - "TempReasonL2Fact": 38.76, - "TempReasonL2Pure": 1.63, - "TempReasonL3Fact": 35.85, - "TempReasonL3Pure": 7.11, - "Touche2020": 21.5, - "VideoRetrieval (cmn-Hans)": 61.26, - "VideoRetrieval": 61.3, - "WinoGrande": 56.18, - "XPQARetrieval (ara-Arab_ara-Arab)": 39.97, - "XPQARetrieval (eng-Latn_ara-Arab)": 17.23, - "XPQARetrieval (ara-Arab_eng-Latn)": 34.35, - "XPQARetrieval (deu-Latn_deu-Latn)": 72.11, - "XPQARetrieval (eng-Latn_deu-Latn)": 28.91, - "XPQARetrieval (deu-Latn_eng-Latn)": 61.46, - "XPQARetrieval (spa-Latn_spa-Latn)": 58.35, - "XPQARetrieval (eng-Latn_spa-Latn)": 25.27, - "XPQARetrieval (spa-Latn_eng-Latn)": 51.07, - "XPQARetrieval (fra-Latn_fra-Latn)": 59.56, - "XPQARetrieval (eng-Latn_fra-Latn)": 23.69, - "XPQARetrieval (fra-Latn_eng-Latn)": 53.9, - "XPQARetrieval (hin-Deva_hin-Deva)": 70.56, - "XPQARetrieval (eng-Latn_hin-Deva)": 27.57, - "XPQARetrieval (hin-Deva_eng-Latn)": 63.68, - "XPQARetrieval (ita-Latn_ita-Latn)": 70.38, - "XPQARetrieval (eng-Latn_ita-Latn)": 26.06, - "XPQARetrieval (ita-Latn_eng-Latn)": 56.2, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 71.97, - "XPQARetrieval (eng-Latn_jpn-Hira)": 17.63, - "XPQARetrieval (jpn-Hira_eng-Latn)": 61.03, - "XPQARetrieval (kor-Hang_kor-Hang)": 36.12, - "XPQARetrieval (eng-Latn_kor-Hang)": 20.27, - "XPQARetrieval (kor-Hang_eng-Latn)": 29.26, - "XPQARetrieval (pol-Latn_pol-Latn)": 48.1, - "XPQARetrieval (eng-Latn_pol-Latn)": 19.48, - "XPQARetrieval (pol-Latn_eng-Latn)": 40.18, - "XPQARetrieval (por-Latn_por-Latn)": 44.76, - "XPQARetrieval (eng-Latn_por-Latn)": 17.66, - "XPQARetrieval (por-Latn_eng-Latn)": 40.52, - "XPQARetrieval (tam-Taml_tam-Taml)": 35.25, - "XPQARetrieval (eng-Latn_tam-Taml)": 12.64, - "XPQARetrieval (tam-Taml_eng-Latn)": 26.73, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 67.06, - "XPQARetrieval (eng-Latn_cmn-Hans)": 12.72, - "XPQARetrieval (cmn-Hans_eng-Latn)": 53.53, - "XPQARetrieval (fr)": 65.81 + "Model": "gelectra-base" } ] }, "STS": { "spearman": [ { - "Model": "multilingual-e5-base", - "AFQMC (cmn-Hans)": 29.66, - "AFQMC": 29.67, - "ATEC (cmn-Hans)": 37.01, - "ATEC": 37.01, - "BIOSSES": 85.05, - "BQ (cmn-Hans)": 45.45, - "BQ": 45.45, - "CDSC-R (pol-Latn)": 90.09, - "CDSC-R": 90.08, - "LCQMC (cmn-Hans)": 74.15, - "LCQMC": 74.15, - "PAWSX (cmn-Hans)": 12.13, - "PAWSX": 12.14, - "QBQTC": 28.81, - "RUParaPhraserSTS (rus-Cyrl)": 70.17, - "RuSTSBenchmarkSTS (rus-Cyrl)": 79.64, - "SICK-R": 78.51, - "SICK-R-PL (pol-Latn)": 71.23, - "SICK-R-PL": 71.23, - "SICKFr (fra-Latn)": 75.76, - "SICKFr": 
76.23, - "STS12": 76.7, - "STS13": 78.02, - "STS14": 76.6, - "STS15": 88.16, - "STS16": 84.28, - "STS17 (fra-Latn_eng-Latn)": 80.18, - "STS17 (ita-Latn_eng-Latn)": 80.16, - "STS17 (eng-Latn_ara-Arab)": 71.27, - "STS17 (kor-Hang)": 79.95, - "STS17 (eng-Latn_tur-Latn)": 63.3, - "STS17 (spa-Latn_eng-Latn)": 76.56, - "STS17 (spa-Latn)": 86.74, - "STS17 (en-en)": 87.84, - "STS17 (ara-Arab)": 74.48, - "STS17 (nld-Latn_eng-Latn)": 79.29, - "STS17 (eng-Latn_deu-Latn)": 82.08, - "STS22 (fra-Latn)": 75.04, - "STS22 (ara-Arab)": 57.82, - "STS22 (en)": 62.26, - "STS22 (spa-Latn)": 66.67, - "STS22 (fra-Latn_pol-Latn)": 73.25, - "STS22 (ita-Latn)": 77.76, - "STS22 (pol-Latn_eng-Latn)": 70.37, - "STS22 (tur-Latn)": 63.71, - "STS22 (rus-Cyrl)": 60.67, - "STS22 (deu-Latn)": 55.95, - "STS22 (deu-Latn_fra-Latn)": 59.68, - "STS22 (spa-Latn_eng-Latn)": 74.0, - "STS22 (cmn-Hans_eng-Latn)": 69.8, - "STS22 (pol-Latn)": 34.08, - "STS22 (spa-Latn_ita-Latn)": 66.43, - "STS22 (cmn-Hans)": 65.63, - "STS22 (deu-Latn_pol-Latn)": 39.35, - "STS22 (deu-Latn_eng-Latn)": 54.89, - "STS22 (zh)": 65.64, - "STS22 (pl)": 34.07, - "STSB (cmn-Hans)": 79.04, - "STSB": 79.05, - "STSBenchmark": 85.64, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.33, - "STSBenchmarkMultilingualSTS (pol-Latn)": 74.93, - "STSBenchmarkMultilingualSTS (spa-Latn)": 81.75, - "STSBenchmarkMultilingualSTS (en)": 85.64, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 79.87, - "STSBenchmarkMultilingualSTS (fra-Latn)": 80.85, - "STSBenchmarkMultilingualSTS (deu-Latn)": 79.68, - "STSBenchmarkMultilingualSTS (nld-Latn)": 75.96, - "STSBenchmarkMultilingualSTS (por-Latn)": 67.16, - "STSBenchmarkMultilingualSTS (ita-Latn)": 78.09, - "STSBenchmarkMultilingualSTS (fr)": 80.62 + "Model": "gelectra-base" } ] }, "Summarization": { "spearman": [ { - "Model": "multilingual-e5-base", - "SummEval": 30.23, - "SummEvalFr (fra-Latn)": 32.96, - "SummEvalFr": 30.76 + "Model": "gelectra-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "multilingual-e5-base" + "Model": "gelectra-base" } ] } }, - "norbert3-base": { + "e5-base": { "BitextMining": { "f1": [ { - "Model": "norbert3-base", - "BornholmBitextMining": 6.08 + "Model": "e5-base", + "BornholmBitextMining": 40.09 } ] }, "Classification": { "accuracy": [ { - "Model": "norbert3-base", - "AngryTweetsClassification": 52.48, - "DKHateClassification": 58.78, - "DanishPoliticalCommentsClassification": 34.14, - "LccSentimentClassification": 54.07, - "MassiveIntentClassification (da)": 53.16, - "MassiveIntentClassification (nb)": 54.2, - "MassiveIntentClassification (sv)": 52.08, - "MassiveScenarioClassification (da)": 57.17, - "MassiveScenarioClassification (nb)": 60.69, - "MassiveScenarioClassification (sv)": 53.53, - "NoRecClassification": 53.4, - "NordicLangClassification": 82.67, - "NorwegianParliament": 59.33, - "ScalaDaClassification": 58.25, - "ScalaNbClassification": 60.19 + "Model": "e5-base", + "AngryTweetsClassification": 45.06, + "DKHateClassification": 58.51, + "DanishPoliticalCommentsClassification": 28.43, + "LccSentimentClassification": 37.47, + "MassiveIntentClassification (da)": 44.25, + "MassiveIntentClassification (nb)": 41.57, + "MassiveIntentClassification (sv)": 41.34, + "MassiveScenarioClassification (da)": 52.99, + "MassiveScenarioClassification (nb)": 50.33, + "MassiveScenarioClassification (sv)": 50.0, + "NoRecClassification": 42.0, + "NordicLangClassification": 59.34, + "NorwegianParliament": 57.42, + "ScalaDaClassification": 50.08, + "ScalaNbClassification": 50.18 } ] }, "Clustering": { 
"v_measure": [ { - "Model": "norbert3-base" + "Model": "e5-base" } ] }, "PairClassification": { "ap": [ { - "Model": "norbert3-base" + "Model": "e5-base" } ] }, "Reranking": { "map": [ { - "Model": "norbert3-base" + "Model": "e5-base" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "norbert3-base" + "Model": "e5-base", + "LEMBNarrativeQARetrieval": 25.31, + "LEMBNeedleRetrieval": 28.5, + "LEMBPasskeyRetrieval": 33.25, + "LEMBQMSumRetrieval": 23.83, + "LEMBSummScreenFDRetrieval": 74.67, + "LEMBWikimQARetrieval": 55.85 } ] }, "STS": { "spearman": [ { - "Model": "norbert3-base" + "Model": "e5-base" } ] }, "Summarization": { "spearman": [ { - "Model": "norbert3-base" + "Model": "e5-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "norbert3-base" + "Model": "e5-base" } ] } }, - "voyage-code-2": { + "text-similarity-ada-001": { "BitextMining": { "f1": [ { - "Model": "voyage-code-2" + "Model": "text-similarity-ada-001" } ] }, "Classification": { "accuracy": [ { - "Model": "voyage-code-2", - "AmazonReviewsClassification (fr)": 42.15, - "MTOPDomainClassification (fr)": 87.68, - "MTOPIntentClassification (fr)": 59.44, - "MasakhaNEWSClassification (fra)": 82.13, - "MassiveIntentClassification (fr)": 63.08, - "MassiveScenarioClassification (fr)": 70.15 + "Model": "text-similarity-ada-001", + "AmazonCounterfactualClassification (en)": 76.4, + "AmazonPolarityClassification": 92.83, + "AmazonReviewsClassification (en)": 47.45, + "Banking77Classification": 68.04, + "EmotionClassification": 50.33, + "ImdbClassification": 89.38, + "MTOPDomainClassification (en)": 89.89, + "MTOPIntentClassification (en)": 64.8, + "MassiveIntentClassification (en)": 65.17, + "MassiveScenarioClassification (en)": 67.67, + "ToxicConversationsClassification": 70.0, + "TweetSentimentExtractionClassification": 63.35 } ] }, "Clustering": { "v_measure": [ { - "Model": "voyage-code-2", - "AlloProfClusteringP2P": 61.63, - "AlloProfClusteringS2S": 50.67, - "HALClusteringS2S": 27.44, - "MLSUMClusteringP2P": 45.23, - "MLSUMClusteringS2S": 41.48, - "MasakhaNEWSClusteringP2P (fra)": 56.59, - "MasakhaNEWSClusteringS2S (fra)": 35.18 + "Model": "text-similarity-ada-001", + "ArxivClusteringP2P": 41.49, + "ArxivClusteringS2S": 28.47, + "BiorxivClusteringP2P": 36.86, + "BiorxivClusteringS2S": 27.55, + "MedrxivClusteringP2P": 31.09, + "MedrxivClusteringS2S": 26.5, + "RedditClustering": 42.47, + "RedditClusteringP2P": 58.1, + "StackExchangeClustering": 53.52, + "StackExchangeClusteringP2P": 30.43, + "TwentyNewsgroupsClustering": 36.26 } ] }, "PairClassification": { "ap": [ { - "Model": "voyage-code-2", - "OpusparcusPC (fr)": 92.87, - "PawsXPairClassification (fr)": 60.83 + "Model": "text-similarity-ada-001", + "SprintDuplicateQuestions": 77.85, + "TwitterSemEval2015": 69.04, + "TwitterURLCorpus": 83.69 } ] }, "Reranking": { "map": [ { - "Model": "voyage-code-2", - "AlloprofReranking": 70.79, - "SyntecReranking": 86.77 + "Model": "text-similarity-ada-001", + "AskUbuntuDupQuestions": 53.49, + "MindSmallReranking": 30.71, + "SciDocsRR": 71.04, + "StackOverflowDupQuestions": 40.85 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "voyage-code-2", - "AlloprofRetrieval": 52.61, - "BSARDRetrieval": 0.29, - "MintakaRetrieval (fr)": 19.05, - "SyntecRetrieval": 82.77, - "XPQARetrieval (fr)": 71.95 + "Model": "text-similarity-ada-001", + "ArguAna": 39.65, + "CQADupstackRetrieval": 10.17, + "ClimateFEVER": 2.83, + "DBPedia": 3.48, + "FEVER": 4.45, + "FiQA2018": 7.54, + "HotpotQA": 12.6, + "MSMARCO": 10.53, + "NFCorpus": 20.59, + "NQ": 2.02, 
+ "QuoraRetrieval": 82.18, + "SCIDOCS": 6.28, + "SciFact": 45.46, + "TRECCOVID": 24.56, + "Touche2020": 3.1 } ] }, "STS": { "spearman": [ { - "Model": "voyage-code-2", - "SICKFr": 73.56, - "STS22 (fr)": 79.99, - "STSBenchmarkMultilingualSTS (fr)": 79.02 + "Model": "text-similarity-ada-001", + "BIOSSES": 78.04, + "SICK-R": 77.48, + "STS12": 72.3, + "STS13": 81.49, + "STS14": 74.74, + "STS15": 84.28, + "STS16": 82.06, + "STS17 (en-en)": 87.08, + "STS22 (en)": 64.71, + "STSBenchmark": 83.78 } ] }, "Summarization": { "spearman": [ { - "Model": "voyage-code-2", - "SummEvalFr": 28.34 + "Model": "text-similarity-ada-001", + "SummEval": 26.94 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "voyage-code-2" + "Model": "text-similarity-ada-001" } ] } }, - "mistral-7b-instruct-v0.2": { + "text-search-ada-doc-001": { "BitextMining": { "f1": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "Classification": { "accuracy": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "Clustering": { "v_measure": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001", + "TwentyNewsgroupsClustering": 32.92 } ] }, "PairClassification": { "ap": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "Reranking": { "map": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "STS": { "spearman": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "Summarization": { "spearman": [ { - "Model": "mistral-7b-instruct-v0.2" + "Model": "text-search-ada-doc-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "mistral-7b-instruct-v0.2", - "Core17InstructionRetrieval": 13.03, - "News21InstructionRetrieval": 4.81, - "Robust04InstructionRetrieval": 12.61 + "Model": "text-search-ada-doc-001" } ] } }, - "sentence-t5-large": { + "LLM2Vec-Mistral-unsupervised": { "BitextMining": { "f1": [ { - "Model": "sentence-t5-large", - "BUCC (de-en)": 87.0, - "BUCC (fr-en)": 88.91, - "BUCC (ru-en)": 0.44, - "BUCC (zh-en)": 0.95, - "Tatoeba (afr-eng)": 23.7, - "Tatoeba (amh-eng)": 0.65, - "Tatoeba (ang-eng)": 30.98, - "Tatoeba (ara-eng)": 0.48, - "Tatoeba (arq-eng)": 0.68, - "Tatoeba (arz-eng)": 0.22, - "Tatoeba (ast-eng)": 55.3, - "Tatoeba (awa-eng)": 1.03, - "Tatoeba (aze-eng)": 5.83, - "Tatoeba (bel-eng)": 1.66, - "Tatoeba (ben-eng)": 0.0, - "Tatoeba (ber-eng)": 5.62, - "Tatoeba (bos-eng)": 12.23, - "Tatoeba (bre-eng)": 5.84, - "Tatoeba (bul-eng)": 1.35, - "Tatoeba (cat-eng)": 48.56, - "Tatoeba (cbk-eng)": 46.97, - "Tatoeba (ceb-eng)": 9.79, - "Tatoeba (ces-eng)": 6.0, - "Tatoeba (cha-eng)": 24.21, - "Tatoeba (cmn-eng)": 2.26, - "Tatoeba (cor-eng)": 4.03, - "Tatoeba (csb-eng)": 9.53, - "Tatoeba (cym-eng)": 9.17, - "Tatoeba (dan-eng)": 34.63, - "Tatoeba (deu-eng)": 89.31, - "Tatoeba (dsb-eng)": 9.68, - "Tatoeba (dtp-eng)": 4.66, - "Tatoeba (ell-eng)": 0.77, - "Tatoeba (epo-eng)": 26.88, - "Tatoeba (est-eng)": 5.19, - "Tatoeba (eus-eng)": 9.46, - "Tatoeba (fao-eng)": 21.59, - "Tatoeba (fin-eng)": 5.66, - "Tatoeba (fra-eng)": 79.71, - "Tatoeba (fry-eng)": 28.29, - "Tatoeba (gla-eng)": 2.34, - "Tatoeba (gle-eng)": 3.55, - "Tatoeba (glg-eng)": 56.25, - "Tatoeba (gsw-eng)": 24.25, - "Tatoeba (heb-eng)": 0.57, - "Tatoeba (hin-eng)": 0.12, - "Tatoeba (hrv-eng)": 10.29, - "Tatoeba (hsb-eng)": 9.52, - "Tatoeba (hun-eng)": 6.22, - 
"Tatoeba (hye-eng)": 0.81, - "Tatoeba (ido-eng)": 41.11, - "Tatoeba (ile-eng)": 54.0, - "Tatoeba (ina-eng)": 75.47, - "Tatoeba (ind-eng)": 13.02, - "Tatoeba (isl-eng)": 8.98, - "Tatoeba (ita-eng)": 67.23, - "Tatoeba (jav-eng)": 8.54, - "Tatoeba (jpn-eng)": 0.99, - "Tatoeba (kab-eng)": 1.85, - "Tatoeba (kat-eng)": 1.37, - "Tatoeba (kaz-eng)": 0.67, - "Tatoeba (khm-eng)": 0.56, - "Tatoeba (kor-eng)": 1.73, - "Tatoeba (kur-eng)": 9.23, - "Tatoeba (kzj-eng)": 5.38, - "Tatoeba (lat-eng)": 21.3, - "Tatoeba (lfn-eng)": 40.48, - "Tatoeba (lit-eng)": 5.38, - "Tatoeba (lvs-eng)": 6.83, - "Tatoeba (mal-eng)": 0.45, - "Tatoeba (mar-eng)": 0.01, - "Tatoeba (max-eng)": 16.44, - "Tatoeba (mhr-eng)": 0.33, - "Tatoeba (mkd-eng)": 0.4, - "Tatoeba (mon-eng)": 2.48, - "Tatoeba (nds-eng)": 34.66, - "Tatoeba (nld-eng)": 42.72, - "Tatoeba (nno-eng)": 24.08, - "Tatoeba (nob-eng)": 34.17, - "Tatoeba (nov-eng)": 55.01, - "Tatoeba (oci-eng)": 29.15, - "Tatoeba (orv-eng)": 0.2, - "Tatoeba (pam-eng)": 6.99, - "Tatoeba (pes-eng)": 0.9, - "Tatoeba (pms-eng)": 30.8, - "Tatoeba (pol-eng)": 12.81, - "Tatoeba (por-eng)": 73.45, - "Tatoeba (ron-eng)": 54.86, - "Tatoeba (rus-eng)": 2.43, - "Tatoeba (slk-eng)": 8.35, - "Tatoeba (slv-eng)": 9.3, - "Tatoeba (spa-eng)": 78.87, - "Tatoeba (sqi-eng)": 11.74, - "Tatoeba (srp-eng)": 5.83, - "Tatoeba (swe-eng)": 35.41, - "Tatoeba (swg-eng)": 28.18, - "Tatoeba (swh-eng)": 7.53, - "Tatoeba (tam-eng)": 0.36, - "Tatoeba (tat-eng)": 1.01, - "Tatoeba (tel-eng)": 1.1, - "Tatoeba (tgl-eng)": 12.4, - "Tatoeba (tha-eng)": 1.58, - "Tatoeba (tuk-eng)": 4.95, - "Tatoeba (tur-eng)": 6.45, - "Tatoeba (tzl-eng)": 37.82, - "Tatoeba (uig-eng)": 0.67, - "Tatoeba (ukr-eng)": 1.88, - "Tatoeba (urd-eng)": 0.0, - "Tatoeba (uzb-eng)": 4.79, - "Tatoeba (vie-eng)": 7.03, - "Tatoeba (war-eng)": 9.68, - "Tatoeba (wuu-eng)": 1.28, - "Tatoeba (xho-eng)": 10.64, - "Tatoeba (yid-eng)": 0.57, - "Tatoeba (yue-eng)": 0.88, - "Tatoeba (zsm-eng)": 14.67 + "Model": "LLM2Vec-Mistral-unsupervised" } ] }, "Classification": { "accuracy": [ { - "Model": "sentence-t5-large", - "AmazonCounterfactualClassification (de)": 67.97, - "AmazonCounterfactualClassification (en)": 75.51, - "AmazonCounterfactualClassification (en-ext)": 75.44, - "AmazonCounterfactualClassification (ja)": 45.72, - "AmazonPolarityClassification": 92.87, - "AmazonReviewsClassification (de)": 43.16, - "AmazonReviewsClassification (en)": 47.12, - "AmazonReviewsClassification (es)": 42.89, - "AmazonReviewsClassification (fr)": 41.48, - "AmazonReviewsClassification (ja)": 22.49, - "AmazonReviewsClassification (zh)": 22.12, - "Banking77Classification": 78.46, - "EmotionClassification": 51.74, - "ImdbClassification": 87.01, - "MTOPDomainClassification (de)": 80.56, - "MTOPDomainClassification (en)": 90.99, - "MTOPDomainClassification (es)": 80.78, - "MTOPDomainClassification (fr)": 79.6, - "MTOPDomainClassification (hi)": 21.22, - "MTOPDomainClassification (th)": 15.82, - "MTOPIntentClassification (de)": 52.5, - "MTOPIntentClassification (en)": 64.98, - "MTOPIntentClassification (es)": 52.07, - "MTOPIntentClassification (fr)": 47.73, - "MTOPIntentClassification (hi)": 3.74, - "MTOPIntentClassification (th)": 4.96, - "MasakhaNEWSClassification (fra)": 80.43, - "MassiveIntentClassification (af)": 38.41, - "MassiveIntentClassification (am)": 2.49, - "MassiveIntentClassification (ar)": 4.7, - "MassiveIntentClassification (az)": 31.77, - "MassiveIntentClassification (bn)": 2.77, - "MassiveIntentClassification (cy)": 31.69, - "MassiveIntentClassification (da)": 41.76, - 
"MassiveIntentClassification (de)": 52.01, - "MassiveIntentClassification (el)": 9.74, - "MassiveIntentClassification (en)": 71.78, - "MassiveIntentClassification (es)": 54.1, - "MassiveIntentClassification (fa)": 3.86, - "MassiveIntentClassification (fi)": 34.07, - "MassiveIntentClassification (fr)": 57.01, - "MassiveIntentClassification (he)": 2.14, - "MassiveIntentClassification (hi)": 2.97, - "MassiveIntentClassification (hu)": 32.01, - "MassiveIntentClassification (hy)": 3.17, - "MassiveIntentClassification (id)": 34.55, - "MassiveIntentClassification (is)": 32.0, - "MassiveIntentClassification (it)": 52.94, - "MassiveIntentClassification (ja)": 2.9, - "MassiveIntentClassification (jv)": 32.42, - "MassiveIntentClassification (ka)": 2.71, - "MassiveIntentClassification (km)": 5.5, - "MassiveIntentClassification (kn)": 2.41, - "MassiveIntentClassification (ko)": 2.57, - "MassiveIntentClassification (lv)": 35.09, - "MassiveIntentClassification (ml)": 2.95, - "MassiveIntentClassification (mn)": 18.33, - "MassiveIntentClassification (ms)": 29.69, - "MassiveIntentClassification (my)": 3.99, - "MassiveIntentClassification (nb)": 41.29, - "MassiveIntentClassification (nl)": 44.95, - "MassiveIntentClassification (pl)": 37.67, - "MassiveIntentClassification (pt)": 51.96, - "MassiveIntentClassification (ro)": 43.83, - "MassiveIntentClassification (ru)": 17.32, - "MassiveIntentClassification (sl)": 33.71, - "MassiveIntentClassification (sq)": 37.62, - "MassiveIntentClassification (sv)": 40.67, - "MassiveIntentClassification (sw)": 31.9, - "MassiveIntentClassification (ta)": 1.91, - "MassiveIntentClassification (te)": 2.54, - "MassiveIntentClassification (th)": 3.85, - "MassiveIntentClassification (tl)": 36.83, - "MassiveIntentClassification (tr)": 33.0, - "MassiveIntentClassification (ur)": 2.62, - "MassiveIntentClassification (vi)": 22.81, - "MassiveIntentClassification (zh-CN)": 1.09, - "MassiveIntentClassification (zh-TW)": 3.49, - "MassiveScenarioClassification (af)": 50.28, - "MassiveScenarioClassification (am)": 7.15, - "MassiveScenarioClassification (ar)": 12.12, - "MassiveScenarioClassification (az)": 39.68, - "MassiveScenarioClassification (bn)": 8.06, - "MassiveScenarioClassification (cy)": 38.01, - "MassiveScenarioClassification (da)": 51.44, - "MassiveScenarioClassification (de)": 62.71, - "MassiveScenarioClassification (el)": 17.19, - "MassiveScenarioClassification (en)": 73.16, - "MassiveScenarioClassification (es)": 59.56, - "MassiveScenarioClassification (fa)": 6.5, - "MassiveScenarioClassification (fi)": 41.72, - "MassiveScenarioClassification (fr)": 63.6, - "MassiveScenarioClassification (he)": 7.93, - "MassiveScenarioClassification (hi)": 7.85, - "MassiveScenarioClassification (hu)": 41.37, - "MassiveScenarioClassification (hy)": 9.42, - "MassiveScenarioClassification (id)": 44.88, - "MassiveScenarioClassification (is)": 40.86, - "MassiveScenarioClassification (it)": 60.09, - "MassiveScenarioClassification (ja)": 6.56, - "MassiveScenarioClassification (jv)": 40.18, - "MassiveScenarioClassification (ka)": 7.37, - "MassiveScenarioClassification (km)": 9.56, - "MassiveScenarioClassification (kn)": 8.4, - "MassiveScenarioClassification (ko)": 5.96, - "MassiveScenarioClassification (lv)": 41.44, - "MassiveScenarioClassification (ml)": 7.47, - "MassiveScenarioClassification (mn)": 25.36, - "MassiveScenarioClassification (ms)": 39.69, - "MassiveScenarioClassification (my)": 9.68, - "MassiveScenarioClassification (nb)": 49.92, - "MassiveScenarioClassification (nl)": 56.09, - 
"MassiveScenarioClassification (pl)": 45.2, - "MassiveScenarioClassification (pt)": 57.99, - "MassiveScenarioClassification (ro)": 56.0, - "MassiveScenarioClassification (ru)": 27.47, - "MassiveScenarioClassification (sl)": 41.04, - "MassiveScenarioClassification (sq)": 49.38, - "MassiveScenarioClassification (sv)": 50.97, - "MassiveScenarioClassification (sw)": 40.62, - "MassiveScenarioClassification (ta)": 7.59, - "MassiveScenarioClassification (te)": 7.07, - "MassiveScenarioClassification (th)": 8.52, - "MassiveScenarioClassification (tl)": 49.89, - "MassiveScenarioClassification (tr)": 43.08, - "MassiveScenarioClassification (ur)": 9.31, - "MassiveScenarioClassification (vi)": 27.46, - "MassiveScenarioClassification (zh-CN)": 4.7, - "MassiveScenarioClassification (zh-TW)": 7.24, - "ToxicConversationsClassification": 71.73, - "TweetSentimentExtractionClassification": 62.33 + "Model": "LLM2Vec-Mistral-unsupervised", + "AmazonCounterfactualClassification (en)": 76.94, + "AmazonPolarityClassification": 85.29, + "AmazonReviewsClassification (en)": 47.09, + "Banking77Classification": 86.16, + "EmotionClassification": 48.88, + "ImdbClassification": 77.95, + "MTOPDomainClassification (en)": 95.48, + "MTOPIntentClassification (en)": 82.84, + "MassiveIntentClassification (en)": 76.65, + "MassiveScenarioClassification (en)": 79.99, + "ToxicConversationsClassification": 70.71, + "TweetSentimentExtractionClassification": 60.9 } ] }, "Clustering": { "v_measure": [ { - "Model": "sentence-t5-large", - "AlloProfClusteringP2P": 61.82, - "AlloProfClusteringS2S": 39.78, - "ArxivClusteringP2P": 41.62, - "ArxivClusteringS2S": 29.44, - "BiorxivClusteringP2P": 35.99, - "BiorxivClusteringS2S": 24.02, - "BlurbsClusteringP2P": 35.33, - "BlurbsClusteringS2S": 13.27, - "HALClusteringS2S": 18.73, - "MLSUMClusteringP2P": 42.07, - "MLSUMClusteringS2S": 31.87, - "MasakhaNEWSClusteringP2P (fra)": 58.6, - "MasakhaNEWSClusteringS2S (fra)": 31.33, - "MedrxivClusteringP2P": 32.4, - "MedrxivClusteringS2S": 26.33, - "RedditClustering": 54.53, - "RedditClusteringP2P": 62.5, - "StackExchangeClustering": 65.11, - "StackExchangeClusteringP2P": 36.86, - "TenKGnadClusteringP2P": 44.11, - "TenKGnadClusteringS2S": 17.26, - "TwentyNewsgroupsClustering": 49.33 + "Model": "LLM2Vec-Mistral-unsupervised", + "ArxivClusteringP2P": 47.56, + "ArxivClusteringS2S": 39.92, + "BiorxivClusteringP2P": 36.14, + "BiorxivClusteringS2S": 30.26, + "MedrxivClusteringP2P": 30.11, + "MedrxivClusteringS2S": 26.93, + "RedditClustering": 41.83, + "RedditClusteringP2P": 62.08, + "StackExchangeClustering": 67.34, + "StackExchangeClusteringP2P": 34.5, + "TwentyNewsgroupsClustering": 30.26 } ] }, "PairClassification": { "ap": [ { - "Model": "sentence-t5-large", - "OpusparcusPC (fr)": 91.19, - "PawsXPairClassification (fr)": 59.59, - "SprintDuplicateQuestions": 89.01, - "TwitterSemEval2015": 79.75, - "TwitterURLCorpus": 86.14 + "Model": "LLM2Vec-Mistral-unsupervised", + "SprintDuplicateQuestions": 91.3, + "TwitterSemEval2015": 68.76, + "TwitterURLCorpus": 82.76 } ] }, "Reranking": { "map": [ { - "Model": "sentence-t5-large", - "AlloprofReranking": 57.99, - "AskUbuntuDupQuestions": 61.51, - "MindSmallReranking": 30.27, - "SciDocsRR": 74.88, - "StackOverflowDupQuestions": 49.34, - "SyntecReranking": 79.77 + "Model": "LLM2Vec-Mistral-unsupervised", + "AskUbuntuDupQuestions": 58.6, + "MindSmallReranking": 29.73, + "SciDocsRR": 77.81, + "StackOverflowDupQuestions": 49.8 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "sentence-t5-large", - "AlloprofRetrieval": 
34.52, - "ArguAna": 39.27, - "BSARDRetrieval": 0.0, - "CQADupstackRetrieval": 38.96, - "ClimateFEVER": 11.36, - "DBPedia": 31.55, - "FEVER": 36.21, - "FiQA2018": 43.55, - "HotpotQA": 33.95, - "MSMARCO": 23.96, - "MintakaRetrieval (fr)": 23.92, - "NFCorpus": 31.1, - "NQ": 42.02, - "QuoraRetrieval": 85.73, - "SCIDOCS": 15.38, - "SciFact": 49.91, - "SyntecRetrieval": 71.05, - "TRECCOVID": 46.11, - "Touche2020": 21.63, - "XPQARetrieval (fr)": 48.79 + "Model": "LLM2Vec-Mistral-unsupervised", + "ArguAna": 51.0, + "CQADupstackRetrieval": 33.37, + "ClimateFEVER": 22.97, + "DBPedia": 25.48, + "FEVER": 45.11, + "FiQA2018": 27.24, + "HotpotQA": 54.54, + "MSMARCO": 19.13, + "NFCorpus": 27.16, + "NQ": 34.16, + "QuoraRetrieval": 84.4, + "SCIDOCS": 15.35, + "SciFact": 68.68, + "TRECCOVID": 55.67, + "Touche2020": 6.54 } ] }, "STS": { "spearman": [ { - "Model": "sentence-t5-large", - "BIOSSES": 78.93, - "SICK-R": 80.34, - "SICKFr": 72.83, - "STS12": 79.11, - "STS13": 87.33, - "STS14": 83.17, - "STS15": 88.28, - "STS16": 84.36, - "STS17 (ar-ar)": 10.75, - "STS17 (en-ar)": -4.71, - "STS17 (en-de)": 73.62, - "STS17 (en-en)": 88.99, - "STS17 (en-tr)": -0.42, - "STS17 (es-en)": 62.62, - "STS17 (es-es)": 82.74, - "STS17 (fr-en)": 67.86, - "STS17 (it-en)": 51.86, - "STS17 (ko-ko)": 9.44, - "STS17 (nl-en)": 45.95, - "STS22 (ar)": 27.01, - "STS22 (de)": 43.73, - "STS22 (de-en)": 49.93, - "STS22 (de-fr)": 61.58, - "STS22 (de-pl)": 38.83, - "STS22 (en)": 62.39, - "STS22 (es)": 57.68, - "STS22 (es-en)": 68.09, - "STS22 (es-it)": 61.58, - "STS22 (fr)": 75.01, - "STS22 (fr-pl)": 5.63, - "STS22 (it)": 62.01, - "STS22 (pl)": 25.0, - "STS22 (pl-en)": 51.72, - "STS22 (ru)": 14.21, - "STS22 (tr)": 47.3, - "STS22 (zh)": 30.47, - "STS22 (zh-en)": 23.1, - "STSBenchmark": 85.36, - "STSBenchmarkMultilingualSTS (fr)": 77.59 + "Model": "LLM2Vec-Mistral-unsupervised", + "BIOSSES": 83.29, + "SICK-R": 75.55, + "STS12": 67.65, + "STS13": 83.9, + "STS14": 76.97, + "STS15": 83.8, + "STS16": 81.91, + "STS17 (en-en)": 85.58, + "STS22 (en)": 65.93, + "STSBenchmark": 80.42 } ] }, "Summarization": { "spearman": [ { - "Model": "sentence-t5-large", - "SummEval": 29.64, - "SummEvalFr": 30.23 + "Model": "LLM2Vec-Mistral-unsupervised", + "SummEval": 30.19 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "sentence-t5-large" + "Model": "LLM2Vec-Mistral-unsupervised" } ] } @@ -3929,1253 +2417,1259 @@ ] } }, - "bge-base-en-v1.5": { + "sentence-t5-xxl": { "BitextMining": { "f1": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl" } ] }, "Classification": { "accuracy": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl", + "AmazonCounterfactualClassification (en)": 77.07, + "AmazonPolarityClassification": 92.79, + "AmazonReviewsClassification (en)": 48.93, + "AmazonReviewsClassification (fr)": 46.09, + "Banking77Classification": 82.31, + "EmotionClassification": 48.57, + "ImdbClassification": 90.23, + "MTOPDomainClassification (en)": 92.49, + "MTOPDomainClassification (fr)": 86.2, + "MTOPIntentClassification (en)": 68.33, + "MTOPIntentClassification (fr)": 58.33, + "MasakhaNEWSClassification (fra)": 79.1, + "MassiveIntentClassification (en)": 73.44, + "MassiveIntentClassification (fr)": 65.91, + "MassiveScenarioClassification (en)": 74.82, + "MassiveScenarioClassification (fr)": 68.53, + "ToxicConversationsClassification": 70.04, + "TweetSentimentExtractionClassification": 62.01 } ] }, "Clustering": { "v_measure": [ { - "Model": "bge-base-en-v1.5", - "BiorxivClusteringP2P": 39.44, - "BiorxivClusteringS2S": 36.62, - 
"MedrxivClusteringP2P": 33.21, - "MedrxivClusteringS2S": 31.68, - "RedditClustering": 56.61, - "RedditClusteringP2P": 62.66, - "StackExchangeClustering": 66.11, - "StackExchangeClusteringP2P": 35.24, - "TwentyNewsgroupsClustering": 50.75 + "Model": "sentence-t5-xxl", + "AlloProfClusteringP2P": 60.98, + "AlloProfClusteringS2S": 43.5, + "ArxivClusteringP2P": 42.89, + "ArxivClusteringS2S": 33.47, + "BiorxivClusteringP2P": 36.53, + "BiorxivClusteringS2S": 28.66, + "BlurbsClusteringP2P": 39.91, + "BlurbsClusteringS2S": 15.94, + "HALClusteringS2S": 21.4, + "MLSUMClusteringP2P": 42.24, + "MLSUMClusteringS2S": 35.25, + "MasakhaNEWSClusteringP2P (fra)": 61.15, + "MasakhaNEWSClusteringS2S (fra)": 38.24, + "MedrxivClusteringP2P": 32.09, + "MedrxivClusteringS2S": 26.82, + "RedditClustering": 58.99, + "RedditClusteringP2P": 64.46, + "StackExchangeClustering": 70.78, + "StackExchangeClusteringP2P": 35.25, + "TenKGnadClusteringP2P": 43.43, + "TenKGnadClusteringS2S": 19.69, + "TwentyNewsgroupsClustering": 50.93 } ] }, "PairClassification": { "ap": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl", + "OpusparcusPC (fr)": 93.94, + "PawsXPairClassification (fr)": 63.98, + "SprintDuplicateQuestions": 88.89, + "TwitterSemEval2015": 80.28, + "TwitterURLCorpus": 86.01 } ] }, "Reranking": { "map": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl", + "AlloprofReranking": 68.36, + "AskUbuntuDupQuestions": 66.16, + "MindSmallReranking": 30.6, + "SciDocsRR": 76.09, + "StackOverflowDupQuestions": 52.85, + "SyntecReranking": 85.15 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bge-base-en-v1.5", - "ARCChallenge": 9.66, - "AlphaNLI": 10.99, - "HellaSwag": 26.64, - "PIQA": 25.69, - "Quail": 1.42, - "RARbCode": 46.47, - "RARbMath": 46.86, - "SIQA": 0.94, - "SpartQA": 3.37, - "TempReasonL1": 1.07, - "TempReasonL2Fact": 17.23, - "TempReasonL2Pure": 1.29, - "TempReasonL3Fact": 13.36, - "TempReasonL3Pure": 5.2, - "WinoGrande": 13.76 + "Model": "sentence-t5-xxl", + "AlloprofRetrieval": 45.75, + "ArguAna": 39.85, + "BSARDRetrieval": 3.33, + "CQADupstackRetrieval": 44.65, + "ClimateFEVER": 14.63, + "DBPedia": 39.19, + "FEVER": 51.2, + "FiQA2018": 46.68, + "HotpotQA": 42.14, + "MSMARCO": 27.67, + "MintakaRetrieval (fr)": 34.93, + "NFCorpus": 35.08, + "NQ": 52.87, + "QuoraRetrieval": 85.96, + "SCIDOCS": 17.17, + "SciFact": 55.38, + "SyntecRetrieval": 78.97, + "TRECCOVID": 59.48, + "Touche2020": 21.65, + "XPQARetrieval (fr)": 56.2 } ] }, "STS": { "spearman": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl", + "BIOSSES": 80.43, + "SICK-R": 80.47, + "SICKFr": 77.07, + "STS12": 78.85, + "STS13": 88.94, + "STS14": 84.86, + "STS15": 89.32, + "STS16": 84.67, + "STS17 (en-en)": 89.46, + "STS22 (en)": 65.33, + "STS22 (fr)": 76.8, + "STSBenchmark": 84.01, + "STSBenchmarkMultilingualSTS (fr)": 81.24 } ] }, "Summarization": { "spearman": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl", + "SummEval": 30.08, + "SummEvalFr": 30.39 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bge-base-en-v1.5" + "Model": "sentence-t5-xxl" } ] } }, - "text-search-ada-doc-001": { + "bge-m3": { "BitextMining": { "f1": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3", + "Tatoeba (rus-Cyrl_eng-Latn)": 93.42 } ] }, "Classification": { "accuracy": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3", + "GeoreviewClassification (rus-Cyrl)": 48.27, + "HeadlineClassification (rus-Cyrl)": 70.32, + "InappropriatenessClassification (rus-Cyrl)": 59.87, + "KinopoiskClassification 
(rus-Cyrl)": 58.23, + "MassiveIntentClassification (rus-Cyrl)": 68.75, + "MassiveScenarioClassification (rus-Cyrl)": 73.42, + "RuReviewsClassification (rus-Cyrl)": 66.91, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.81, + "RuSciBenchOECDClassification (rus-Cyrl)": 42.57 } ] }, "Clustering": { "v_measure": [ { - "Model": "text-search-ada-doc-001", - "TwentyNewsgroupsClustering": 32.92 + "Model": "bge-m3", + "GeoreviewClusteringP2P (rus-Cyrl)": 63.75, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.57, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 43.21 } ] }, "PairClassification": { "ap": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3", + "OpusparcusPC (rus-Cyrl)": 89.64, + "TERRa (rus-Cyrl)": 60.6 } ] }, "Reranking": { "map": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3", + "RuBQReranking (rus-Cyrl)": 74.02 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3", + "ARCChallenge": 9.02, + "AlphaNLI": 24.73, + "HellaSwag": 25.67, + "LEMBNarrativeQARetrieval": 45.76, + "LEMBNeedleRetrieval": 40.25, + "LEMBPasskeyRetrieval": 46.0, + "LEMBQMSumRetrieval": 35.54, + "LEMBSummScreenFDRetrieval": 94.09, + "LEMBWikimQARetrieval": 77.73, + "PIQA": 22.93, + "Quail": 7.51, + "RARbCode": 38.8, + "RARbMath": 69.19, + "RiaNewsRetrieval (rus-Cyrl)": 82.98, + "RuBQRetrieval (rus-Cyrl)": 71.21, + "SIQA": 4.89, + "SpartQA": 7.49, + "TempReasonL1": 0.99, + "TempReasonL2Fact": 33.23, + "TempReasonL2Pure": 0.68, + "TempReasonL3Fact": 30.05, + "TempReasonL3Pure": 5.28, + "WinoGrande": 41.72 } ] }, "STS": { "spearman": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3", + "RUParaPhraserSTS (rus-Cyrl)": 74.9, + "RuSTSBenchmarkSTS (rus-Cyrl)": 79.87, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.27 } ] }, "Summarization": { "spearman": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "text-search-ada-doc-001" + "Model": "bge-m3" } ] } }, - "flaubert_base_uncased": { + "LLM2Vec-Sheared-Llama-supervised": { "BitextMining": { "f1": [ { - "Model": "flaubert_base_uncased" + "Model": "LLM2Vec-Sheared-Llama-supervised" } ] }, "Classification": { "accuracy": [ { - "Model": "flaubert_base_uncased", - "AmazonReviewsClassification (fr)": 23.52, - "MTOPDomainClassification (fr)": 27.74, - "MTOPIntentClassification (fr)": 8.61, - "MasakhaNEWSClassification (fra)": 62.61, - "MassiveIntentClassification (fr)": 6.24, - "MassiveScenarioClassification (fr)": 10.98 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "AmazonCounterfactualClassification (en)": 77.42, + "AmazonPolarityClassification": 82.05, + "AmazonReviewsClassification (en)": 40.81, + "Banking77Classification": 86.01, + "EmotionClassification": 48.38, + "ImdbClassification": 75.33, + "MTOPDomainClassification (en)": 94.09, + "MTOPIntentClassification (en)": 77.05, + "MassiveIntentClassification (en)": 75.58, + "MassiveScenarioClassification (en)": 79.16, + "ToxicConversationsClassification": 69.92, + "TweetSentimentExtractionClassification": 60.76 } ] }, "Clustering": { "v_measure": [ { - "Model": "flaubert_base_uncased", - "AlloProfClusteringP2P": 43.2, - "AlloProfClusteringS2S": 12.94, - "HALClusteringS2S": 1.8, - "MLSUMClusteringP2P": 33.22, - "MLSUMClusteringS2S": 14.9, - "MasakhaNEWSClusteringP2P (fra)": 28.49, - "MasakhaNEWSClusteringS2S (fra)": 22.58 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "ArxivClusteringP2P": 43.47, + "ArxivClusteringS2S": 39.85, + "BiorxivClusteringP2P": 37.1, + "BiorxivClusteringS2S": 
34.28, + "MedrxivClusteringP2P": 33.55, + "MedrxivClusteringS2S": 31.11, + "RedditClustering": 53.02, + "RedditClusteringP2P": 60.47, + "StackExchangeClustering": 63.04, + "StackExchangeClusteringP2P": 34.01, + "TwentyNewsgroupsClustering": 49.37 } ] }, "PairClassification": { "ap": [ { - "Model": "flaubert_base_uncased", - "OpusparcusPC (fr)": 82.0, - "PawsXPairClassification (fr)": 52.78 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "SprintDuplicateQuestions": 96.25, + "TwitterSemEval2015": 76.14, + "TwitterURLCorpus": 86.23 } ] }, "Reranking": { "map": [ { - "Model": "flaubert_base_uncased", - "AlloprofReranking": 34.55, - "SyntecReranking": 57.18 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "AskUbuntuDupQuestions": 60.71, + "MindSmallReranking": 31.96, + "SciDocsRR": 79.23, + "StackOverflowDupQuestions": 49.61 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "flaubert_base_uncased", - "AlloprofRetrieval": 1.72, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 0.51, - "SyntecRetrieval": 22.33, - "XPQARetrieval (fr)": 9.09 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "ArguAna": 51.66, + "CQADupstackRetrieval": 41.73, + "ClimateFEVER": 33.49, + "DBPedia": 43.58, + "FEVER": 86.81, + "FiQA2018": 41.0, + "HotpotQA": 63.85, + "MSMARCO": 38.32, + "NFCorpus": 37.12, + "NQ": 53.89, + "QuoraRetrieval": 87.37, + "SCIDOCS": 17.96, + "SciFact": 72.08, + "TRECCOVID": 80.41, + "Touche2020": 22.31 } ] }, "STS": { "spearman": [ { - "Model": "flaubert_base_uncased", - "SICKFr": 41.9, - "STS22 (fr)": 55.15, - "STSBenchmarkMultilingualSTS (fr)": 33.41 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "BIOSSES": 85.88, + "SICK-R": 82.25, + "STS12": 78.28, + "STS13": 85.52, + "STS14": 82.49, + "STS15": 88.76, + "STS16": 87.11, + "STS17 (en-en)": 90.1, + "STS22 (en)": 68.25, + "STSBenchmark": 87.16 } ] }, "Summarization": { "spearman": [ { - "Model": "flaubert_base_uncased", - "SummEvalFr": 29.43 + "Model": "LLM2Vec-Sheared-Llama-supervised", + "SummEval": 30.01 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "flaubert_base_uncased" + "Model": "LLM2Vec-Sheared-Llama-supervised" } ] } }, - "cross-en-de-roberta-sentence-transformer": { + "bge-base-en-v1.5": { "BitextMining": { "f1": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] }, "Classification": { "accuracy": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] }, "Clustering": { "v_measure": [ { - "Model": "cross-en-de-roberta-sentence-transformer", - "BlurbsClusteringP2P": 30.82, - "BlurbsClusteringS2S": 12.69, - "TenKGnadClusteringP2P": 23.5, - "TenKGnadClusteringS2S": 10.94 + "Model": "bge-base-en-v1.5", + "BiorxivClusteringP2P": 39.44, + "BiorxivClusteringS2S": 36.62, + "MedrxivClusteringP2P": 33.21, + "MedrxivClusteringS2S": 31.68, + "RedditClustering": 56.61, + "RedditClusteringP2P": 62.66, + "StackExchangeClustering": 66.11, + "StackExchangeClusteringP2P": 35.24, + "TwentyNewsgroupsClustering": 50.75 } ] }, "PairClassification": { "ap": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] }, "Reranking": { "map": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5", + "ARCChallenge": 9.66, + "AlphaNLI": 10.99, + "HellaSwag": 26.64, + "PIQA": 25.69, + "Quail": 1.42, + "RARbCode": 46.47, + "RARbMath": 46.86, + "SIQA": 0.94, + "SpartQA": 3.37, + 
"TempReasonL1": 1.07, + "TempReasonL2Fact": 17.23, + "TempReasonL2Pure": 1.29, + "TempReasonL3Fact": 13.36, + "TempReasonL3Pure": 5.2, + "WinoGrande": 13.76 } ] }, "STS": { "spearman": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] }, "Summarization": { "spearman": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "cross-en-de-roberta-sentence-transformer" + "Model": "bge-base-en-v1.5" } ] } }, - "bert-base-10lang-cased": { + "m3e-base": { "BitextMining": { "f1": [ { - "Model": "bert-base-10lang-cased" + "Model": "m3e-base" } ] }, "Classification": { "accuracy": [ { - "Model": "bert-base-10lang-cased", - "AmazonReviewsClassification (fr)": 29.38, - "MTOPDomainClassification (fr)": 63.65, - "MTOPIntentClassification (fr)": 37.87, - "MasakhaNEWSClassification (fra)": 63.93, - "MassiveIntentClassification (fr)": 37.28, - "MassiveScenarioClassification (fr)": 44.5 + "Model": "m3e-base", + "AmazonReviewsClassification (zh)": 43.02, + "IFlyTek": 44.42, + "JDReview": 85.33, + "MassiveIntentClassification (zh-CN)": 68.4, + "MassiveScenarioClassification (zh-CN)": 74.6, + "MultilingualSentiment": 71.9, + "OnlineShopping": 87.77, + "TNews": 48.28, + "Waimai": 83.99 } ] }, "Clustering": { "v_measure": [ { - "Model": "bert-base-10lang-cased", - "AlloProfClusteringP2P": 53.22, - "AlloProfClusteringS2S": 42.92, - "HALClusteringS2S": 19.94, - "MLSUMClusteringP2P": 40.96, - "MLSUMClusteringS2S": 31.87, - "MasakhaNEWSClusteringP2P (fra)": 24.23, - "MasakhaNEWSClusteringS2S (fra)": 24.46 + "Model": "m3e-base", + "CLSClusteringP2P": 39.81, + "CLSClusteringS2S": 37.34, + "ThuNewsClusteringP2P": 59.77, + "ThuNewsClusteringS2S": 53.78 } ] }, "PairClassification": { "ap": [ { - "Model": "bert-base-10lang-cased", - "OpusparcusPC (fr)": 86.79, - "PawsXPairClassification (fr)": 53.4 + "Model": "m3e-base", + "Cmnli": 69.98, + "Ocnli": 58.0 } ] }, "Reranking": { "map": [ { - "Model": "bert-base-10lang-cased", - "AlloprofReranking": 36.21, - "SyntecReranking": 53.25 + "Model": "m3e-base", + "CMedQAv1": 77.05, + "CMedQAv2": 76.76, + "MMarcoReranking": 17.51, + "T2Reranking": 66.03 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bert-base-10lang-cased", - "AlloprofRetrieval": 1.6, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 3.55, - "SyntecRetrieval": 18.95, - "XPQARetrieval (fr)": 18.39 + "Model": "m3e-base", + "CmedqaRetrieval": 30.33, + "CovidRetrieval": 66.42, + "DuRetrieval": 75.76, + "EcomRetrieval": 50.27, + "MMarcoRetrieval": 65.46, + "MedicalRetrieval": 42.79, + "T2Retrieval": 73.14, + "VideoRetrieval": 51.11 } ] }, "STS": { "spearman": [ { - "Model": "bert-base-10lang-cased", - "SICKFr": 58.76, - "STS22 (fr)": 40.31, - "STSBenchmarkMultilingualSTS (fr)": 52.25 + "Model": "m3e-base", + "AFQMC": 35.87, + "ATEC": 41.27, + "BQ": 63.81, + "LCQMC": 74.88, + "PAWSX": 12.19, + "QBQTC": 32.07, + "STS22 (zh)": 66.73, + "STSB": 76.97 } ] }, "Summarization": { "spearman": [ { - "Model": "bert-base-10lang-cased", - "SummEvalFr": 29.06 + "Model": "m3e-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bert-base-10lang-cased" + "Model": "m3e-base" } ] } }, - "flan-t5-large": { + "nomic-embed-text-v1.5-128": { "BitextMining": { "f1": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128" } ] }, "Classification": { "accuracy": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "AmazonCounterfactualClassification (en)": 69.78, + 
"AmazonPolarityClassification": 88.74, + "AmazonReviewsClassification (en)": 43.11, + "Banking77Classification": 82.78, + "EmotionClassification": 42.92, + "ImdbClassification": 80.87, + "MTOPDomainClassification (en)": 89.61, + "MTOPIntentClassification (en)": 68.9, + "MassiveIntentClassification (en)": 69.34, + "MassiveScenarioClassification (en)": 74.21, + "ToxicConversationsClassification": 68.16, + "TweetSentimentExtractionClassification": 57.99 } ] }, "Clustering": { "v_measure": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "ArxivClusteringP2P": 43.87, + "ArxivClusteringS2S": 34.57, + "BiorxivClusteringP2P": 36.79, + "BiorxivClusteringS2S": 30.68, + "MedrxivClusteringP2P": 34.09, + "MedrxivClusteringS2S": 31.3, + "RedditClustering": 53.31, + "RedditClusteringP2P": 58.96, + "StackExchangeClustering": 59.92, + "StackExchangeClusteringP2P": 33.88, + "TwentyNewsgroupsClustering": 47.29 } ] }, "PairClassification": { "ap": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "SprintDuplicateQuestions": 91.45, + "TwitterSemEval2015": 73.23, + "TwitterURLCorpus": 85.93 } ] }, "Reranking": { "map": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "AskUbuntuDupQuestions": 61.16, + "MindSmallReranking": 30.02, + "SciDocsRR": 78.05, + "StackOverflowDupQuestions": 49.0 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "ArguAna": 43.4, + "CQADupstackRetrieval": 34.67, + "ClimateFEVER": 36.52, + "DBPedia": 36.22, + "FEVER": 80.48, + "FiQA2018": 32.08, + "HotpotQA": 60.09, + "MSMARCO": 39.99, + "NFCorpus": 30.72, + "NQ": 53.62, + "QuoraRetrieval": 87.07, + "SCIDOCS": 15.56, + "SciFact": 64.28, + "TRECCOVID": 74.58, + "Touche2020": 26.99 } ] }, "STS": { "spearman": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "BIOSSES": 80.19, + "SICK-R": 79.09, + "STS12": 77.49, + "STS13": 85.62, + "STS14": 80.5, + "STS15": 85.84, + "STS16": 83.9, + "STS17 (en-en)": 86.27, + "STS22 (en)": 64.24, + "STSBenchmark": 84.28 } ] }, "Summarization": { "spearman": [ { - "Model": "flan-t5-large" + "Model": "nomic-embed-text-v1.5-128", + "SummEval": 29.59 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "flan-t5-large", - "Core17InstructionRetrieval": 1.32, - "News21InstructionRetrieval": 8.95, - "Robust04InstructionRetrieval": 3.9 + "Model": "nomic-embed-text-v1.5-128" } ] } }, - "llama-2-7b-chat": { + "e5-mistral-7b-instruct-noinstruct": { "BitextMining": { "f1": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Classification": { "accuracy": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Clustering": { "v_measure": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "PairClassification": { "ap": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Reranking": { "map": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct", + "ARCChallenge": 20.48, + "AlphaNLI": 18.88, + "HellaSwag": 32.25, + "PIQA": 32.8, + "Quail": 6.25, + "RARbCode": 79.84, + "RARbMath": 76.19, + "SIQA": 5.08, + "SpartQA": 10.87, + "TempReasonL1": 3.04, + "TempReasonL2Fact": 35.63, + "TempReasonL2Pure": 9.32, + "TempReasonL3Fact": 30.41, + "TempReasonL3Pure": 14.39, + "WinoGrande": 45.18 } ] }, "STS": { 
"spearman": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Summarization": { "spearman": [ { - "Model": "llama-2-7b-chat" + "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "llama-2-7b-chat", - "Core17InstructionRetrieval": 2.84, - "News21InstructionRetrieval": 0.23, - "Robust04InstructionRetrieval": 2.0 + "Model": "e5-mistral-7b-instruct-noinstruct" } ] } }, - "bge-small-en-v1.5": { + "sentence-t5-large": { "BitextMining": { "f1": [ { - "Model": "bge-small-en-v1.5" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-small-en-v1.5" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-small-en-v1.5" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-small-en-v1.5" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-small-en-v1.5" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-small-en-v1.5", - "ARCChallenge": 8.95, - "AlphaNLI": 11.64, - "HellaSwag": 25.44, - "PIQA": 23.92, - "Quail": 1.75, - "RARbCode": 42.36, - "RARbMath": 44.98, - "SIQA": 0.77, - "SpartQA": 3.55, - "TempReasonL1": 1.41, - "TempReasonL2Fact": 17.56, - "TempReasonL2Pure": 1.05, - "TempReasonL3Fact": 13.88, - "TempReasonL3Pure": 4.76, - "WinoGrande": 10.28 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-small-en-v1.5" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bge-small-en-v1.5" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-small-en-v1.5" - } - ] - } - }, - "tart-dual-contriever-msmarco": { - "BitextMining": { - "f1": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "tart-dual-contriever-msmarco" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "tart-dual-contriever-msmarco", - "Core17InstructionRetrieval": -3.04, - "News21InstructionRetrieval": -2.98, - "Robust04InstructionRetrieval": -8.98 - } - ] - } - }, - "bert-base-15lang-cased": { - "BitextMining": { - "f1": [ - { - "Model": "bert-base-15lang-cased" + "Model": "sentence-t5-large", + "BUCC (de-en)": 87.0, + "BUCC (fr-en)": 88.91, + "BUCC (ru-en)": 0.44, + "BUCC (zh-en)": 0.95, + "Tatoeba (afr-eng)": 23.7, + "Tatoeba (amh-eng)": 0.65, + "Tatoeba (ang-eng)": 30.98, + "Tatoeba (ara-eng)": 0.48, + "Tatoeba (arq-eng)": 0.68, + "Tatoeba (arz-eng)": 0.22, + "Tatoeba (ast-eng)": 55.3, + "Tatoeba (awa-eng)": 1.03, + "Tatoeba (aze-eng)": 5.83, + "Tatoeba (bel-eng)": 1.66, + "Tatoeba (ben-eng)": 0.0, + "Tatoeba (ber-eng)": 5.62, + "Tatoeba (bos-eng)": 12.23, + "Tatoeba (bre-eng)": 5.84, + "Tatoeba (bul-eng)": 1.35, + "Tatoeba (cat-eng)": 48.56, + "Tatoeba (cbk-eng)": 46.97, + "Tatoeba (ceb-eng)": 9.79, + "Tatoeba (ces-eng)": 6.0, + "Tatoeba (cha-eng)": 24.21, + "Tatoeba (cmn-eng)": 2.26, + "Tatoeba (cor-eng)": 4.03, + "Tatoeba (csb-eng)": 9.53, + "Tatoeba (cym-eng)": 9.17, + "Tatoeba (dan-eng)": 34.63, + "Tatoeba 
(deu-eng)": 89.31, + "Tatoeba (dsb-eng)": 9.68, + "Tatoeba (dtp-eng)": 4.66, + "Tatoeba (ell-eng)": 0.77, + "Tatoeba (epo-eng)": 26.88, + "Tatoeba (est-eng)": 5.19, + "Tatoeba (eus-eng)": 9.46, + "Tatoeba (fao-eng)": 21.59, + "Tatoeba (fin-eng)": 5.66, + "Tatoeba (fra-eng)": 79.71, + "Tatoeba (fry-eng)": 28.29, + "Tatoeba (gla-eng)": 2.34, + "Tatoeba (gle-eng)": 3.55, + "Tatoeba (glg-eng)": 56.25, + "Tatoeba (gsw-eng)": 24.25, + "Tatoeba (heb-eng)": 0.57, + "Tatoeba (hin-eng)": 0.12, + "Tatoeba (hrv-eng)": 10.29, + "Tatoeba (hsb-eng)": 9.52, + "Tatoeba (hun-eng)": 6.22, + "Tatoeba (hye-eng)": 0.81, + "Tatoeba (ido-eng)": 41.11, + "Tatoeba (ile-eng)": 54.0, + "Tatoeba (ina-eng)": 75.47, + "Tatoeba (ind-eng)": 13.02, + "Tatoeba (isl-eng)": 8.98, + "Tatoeba (ita-eng)": 67.23, + "Tatoeba (jav-eng)": 8.54, + "Tatoeba (jpn-eng)": 0.99, + "Tatoeba (kab-eng)": 1.85, + "Tatoeba (kat-eng)": 1.37, + "Tatoeba (kaz-eng)": 0.67, + "Tatoeba (khm-eng)": 0.56, + "Tatoeba (kor-eng)": 1.73, + "Tatoeba (kur-eng)": 9.23, + "Tatoeba (kzj-eng)": 5.38, + "Tatoeba (lat-eng)": 21.3, + "Tatoeba (lfn-eng)": 40.48, + "Tatoeba (lit-eng)": 5.38, + "Tatoeba (lvs-eng)": 6.83, + "Tatoeba (mal-eng)": 0.45, + "Tatoeba (mar-eng)": 0.01, + "Tatoeba (max-eng)": 16.44, + "Tatoeba (mhr-eng)": 0.33, + "Tatoeba (mkd-eng)": 0.4, + "Tatoeba (mon-eng)": 2.48, + "Tatoeba (nds-eng)": 34.66, + "Tatoeba (nld-eng)": 42.72, + "Tatoeba (nno-eng)": 24.08, + "Tatoeba (nob-eng)": 34.17, + "Tatoeba (nov-eng)": 55.01, + "Tatoeba (oci-eng)": 29.15, + "Tatoeba (orv-eng)": 0.2, + "Tatoeba (pam-eng)": 6.99, + "Tatoeba (pes-eng)": 0.9, + "Tatoeba (pms-eng)": 30.8, + "Tatoeba (pol-eng)": 12.81, + "Tatoeba (por-eng)": 73.45, + "Tatoeba (ron-eng)": 54.86, + "Tatoeba (rus-eng)": 2.43, + "Tatoeba (slk-eng)": 8.35, + "Tatoeba (slv-eng)": 9.3, + "Tatoeba (spa-eng)": 78.87, + "Tatoeba (sqi-eng)": 11.74, + "Tatoeba (srp-eng)": 5.83, + "Tatoeba (swe-eng)": 35.41, + "Tatoeba (swg-eng)": 28.18, + "Tatoeba (swh-eng)": 7.53, + "Tatoeba (tam-eng)": 0.36, + "Tatoeba (tat-eng)": 1.01, + "Tatoeba (tel-eng)": 1.1, + "Tatoeba (tgl-eng)": 12.4, + "Tatoeba (tha-eng)": 1.58, + "Tatoeba (tuk-eng)": 4.95, + "Tatoeba (tur-eng)": 6.45, + "Tatoeba (tzl-eng)": 37.82, + "Tatoeba (uig-eng)": 0.67, + "Tatoeba (ukr-eng)": 1.88, + "Tatoeba (urd-eng)": 0.0, + "Tatoeba (uzb-eng)": 4.79, + "Tatoeba (vie-eng)": 7.03, + "Tatoeba (war-eng)": 9.68, + "Tatoeba (wuu-eng)": 1.28, + "Tatoeba (xho-eng)": 10.64, + "Tatoeba (yid-eng)": 0.57, + "Tatoeba (yue-eng)": 0.88, + "Tatoeba (zsm-eng)": 14.67 } ] }, "Classification": { "accuracy": [ { - "Model": "bert-base-15lang-cased", - "AmazonReviewsClassification (fr)": 29.35, - "MTOPDomainClassification (fr)": 63.7, - "MTOPIntentClassification (fr)": 37.85, - "MasakhaNEWSClassification (fra)": 63.89, - "MassiveIntentClassification (fr)": 37.28, - "MassiveScenarioClassification (fr)": 44.47 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bert-base-15lang-cased", - "AlloProfClusteringP2P": 53.16, - "AlloProfClusteringS2S": 43.43, - "HALClusteringS2S": 20.26, - "MLSUMClusteringP2P": 41.22, - "MLSUMClusteringS2S": 31.88, - "MasakhaNEWSClusteringP2P (fra)": 24.23, - "MasakhaNEWSClusteringS2S (fra)": 24.46 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bert-base-15lang-cased", - "OpusparcusPC (fr)": 86.78, - "PawsXPairClassification (fr)": 53.38 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bert-base-15lang-cased", - "AlloprofReranking": 36.21, - "SyntecReranking": 53.25 - } - ] - }, - "Retrieval": { - 
"ndcg_at_10": [ - { - "Model": "bert-base-15lang-cased", - "AlloprofRetrieval": 1.61, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 3.55, - "SyntecRetrieval": 18.95, - "XPQARetrieval (fr)": 18.35 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bert-base-15lang-cased", - "SICKFr": 58.77, - "STS22 (fr)": 40.4, - "STSBenchmarkMultilingualSTS (fr)": 52.25 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bert-base-15lang-cased", - "SummEvalFr": 29.13 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bert-base-15lang-cased" - } - ] - } - }, - "USER-base": { - "BitextMining": { - "f1": [ - { - "Model": "USER-base", - "Tatoeba (rus-Cyrl_eng-Latn)": 90.2 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "USER-base", - "GeoreviewClassification (rus-Cyrl)": 47.23, - "HeadlineClassification (rus-Cyrl)": 74.88, - "InappropriatenessClassification (rus-Cyrl)": 61.94, - "KinopoiskClassification (rus-Cyrl)": 55.69, - "MassiveIntentClassification (rus-Cyrl)": 65.57, - "MassiveScenarioClassification (rus-Cyrl)": 68.33, - "RuReviewsClassification (rus-Cyrl)": 66.44, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.55, - "RuSciBenchOECDClassification (rus-Cyrl)": 43.28 + "Model": "sentence-t5-large", + "AmazonCounterfactualClassification (de)": 67.97, + "AmazonCounterfactualClassification (en)": 75.51, + "AmazonCounterfactualClassification (en-ext)": 75.44, + "AmazonCounterfactualClassification (ja)": 45.72, + "AmazonPolarityClassification": 92.87, + "AmazonReviewsClassification (de)": 43.16, + "AmazonReviewsClassification (en)": 47.12, + "AmazonReviewsClassification (es)": 42.89, + "AmazonReviewsClassification (fr)": 41.48, + "AmazonReviewsClassification (ja)": 22.49, + "AmazonReviewsClassification (zh)": 22.12, + "Banking77Classification": 78.46, + "EmotionClassification": 51.74, + "ImdbClassification": 87.01, + "MTOPDomainClassification (de)": 80.56, + "MTOPDomainClassification (en)": 90.99, + "MTOPDomainClassification (es)": 80.78, + "MTOPDomainClassification (fr)": 79.6, + "MTOPDomainClassification (hi)": 21.22, + "MTOPDomainClassification (th)": 15.82, + "MTOPIntentClassification (de)": 52.5, + "MTOPIntentClassification (en)": 64.98, + "MTOPIntentClassification (es)": 52.07, + "MTOPIntentClassification (fr)": 47.73, + "MTOPIntentClassification (hi)": 3.74, + "MTOPIntentClassification (th)": 4.96, + "MasakhaNEWSClassification (fra)": 80.43, + "MassiveIntentClassification (af)": 38.41, + "MassiveIntentClassification (am)": 2.49, + "MassiveIntentClassification (ar)": 4.7, + "MassiveIntentClassification (az)": 31.77, + "MassiveIntentClassification (bn)": 2.77, + "MassiveIntentClassification (cy)": 31.69, + "MassiveIntentClassification (da)": 41.76, + "MassiveIntentClassification (de)": 52.01, + "MassiveIntentClassification (el)": 9.74, + "MassiveIntentClassification (en)": 71.78, + "MassiveIntentClassification (es)": 54.1, + "MassiveIntentClassification (fa)": 3.86, + "MassiveIntentClassification (fi)": 34.07, + "MassiveIntentClassification (fr)": 57.01, + "MassiveIntentClassification (he)": 2.14, + "MassiveIntentClassification (hi)": 2.97, + "MassiveIntentClassification (hu)": 32.01, + "MassiveIntentClassification (hy)": 3.17, + "MassiveIntentClassification (id)": 34.55, + "MassiveIntentClassification (is)": 32.0, + "MassiveIntentClassification (it)": 52.94, + "MassiveIntentClassification (ja)": 2.9, + "MassiveIntentClassification (jv)": 32.42, + "MassiveIntentClassification (ka)": 2.71, + "MassiveIntentClassification (km)": 5.5, + 
"MassiveIntentClassification (kn)": 2.41, + "MassiveIntentClassification (ko)": 2.57, + "MassiveIntentClassification (lv)": 35.09, + "MassiveIntentClassification (ml)": 2.95, + "MassiveIntentClassification (mn)": 18.33, + "MassiveIntentClassification (ms)": 29.69, + "MassiveIntentClassification (my)": 3.99, + "MassiveIntentClassification (nb)": 41.29, + "MassiveIntentClassification (nl)": 44.95, + "MassiveIntentClassification (pl)": 37.67, + "MassiveIntentClassification (pt)": 51.96, + "MassiveIntentClassification (ro)": 43.83, + "MassiveIntentClassification (ru)": 17.32, + "MassiveIntentClassification (sl)": 33.71, + "MassiveIntentClassification (sq)": 37.62, + "MassiveIntentClassification (sv)": 40.67, + "MassiveIntentClassification (sw)": 31.9, + "MassiveIntentClassification (ta)": 1.91, + "MassiveIntentClassification (te)": 2.54, + "MassiveIntentClassification (th)": 3.85, + "MassiveIntentClassification (tl)": 36.83, + "MassiveIntentClassification (tr)": 33.0, + "MassiveIntentClassification (ur)": 2.62, + "MassiveIntentClassification (vi)": 22.81, + "MassiveIntentClassification (zh-CN)": 1.09, + "MassiveIntentClassification (zh-TW)": 3.49, + "MassiveScenarioClassification (af)": 50.28, + "MassiveScenarioClassification (am)": 7.15, + "MassiveScenarioClassification (ar)": 12.12, + "MassiveScenarioClassification (az)": 39.68, + "MassiveScenarioClassification (bn)": 8.06, + "MassiveScenarioClassification (cy)": 38.01, + "MassiveScenarioClassification (da)": 51.44, + "MassiveScenarioClassification (de)": 62.71, + "MassiveScenarioClassification (el)": 17.19, + "MassiveScenarioClassification (en)": 73.16, + "MassiveScenarioClassification (es)": 59.56, + "MassiveScenarioClassification (fa)": 6.5, + "MassiveScenarioClassification (fi)": 41.72, + "MassiveScenarioClassification (fr)": 63.6, + "MassiveScenarioClassification (he)": 7.93, + "MassiveScenarioClassification (hi)": 7.85, + "MassiveScenarioClassification (hu)": 41.37, + "MassiveScenarioClassification (hy)": 9.42, + "MassiveScenarioClassification (id)": 44.88, + "MassiveScenarioClassification (is)": 40.86, + "MassiveScenarioClassification (it)": 60.09, + "MassiveScenarioClassification (ja)": 6.56, + "MassiveScenarioClassification (jv)": 40.18, + "MassiveScenarioClassification (ka)": 7.37, + "MassiveScenarioClassification (km)": 9.56, + "MassiveScenarioClassification (kn)": 8.4, + "MassiveScenarioClassification (ko)": 5.96, + "MassiveScenarioClassification (lv)": 41.44, + "MassiveScenarioClassification (ml)": 7.47, + "MassiveScenarioClassification (mn)": 25.36, + "MassiveScenarioClassification (ms)": 39.69, + "MassiveScenarioClassification (my)": 9.68, + "MassiveScenarioClassification (nb)": 49.92, + "MassiveScenarioClassification (nl)": 56.09, + "MassiveScenarioClassification (pl)": 45.2, + "MassiveScenarioClassification (pt)": 57.99, + "MassiveScenarioClassification (ro)": 56.0, + "MassiveScenarioClassification (ru)": 27.47, + "MassiveScenarioClassification (sl)": 41.04, + "MassiveScenarioClassification (sq)": 49.38, + "MassiveScenarioClassification (sv)": 50.97, + "MassiveScenarioClassification (sw)": 40.62, + "MassiveScenarioClassification (ta)": 7.59, + "MassiveScenarioClassification (te)": 7.07, + "MassiveScenarioClassification (th)": 8.52, + "MassiveScenarioClassification (tl)": 49.89, + "MassiveScenarioClassification (tr)": 43.08, + "MassiveScenarioClassification (ur)": 9.31, + "MassiveScenarioClassification (vi)": 27.46, + "MassiveScenarioClassification (zh-CN)": 4.7, + "MassiveScenarioClassification (zh-TW)": 7.24, + 
"ToxicConversationsClassification": 71.73, + "TweetSentimentExtractionClassification": 62.33 } ] }, "Clustering": { "v_measure": [ { - "Model": "USER-base", - "GeoreviewClusteringP2P (rus-Cyrl)": 64.16, - "MLSUMClusteringP2P (rus-Cyrl)": 48.09, - "MLSUMClusteringS2S (rus-Cyrl)": 45.73, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.38, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.73 + "Model": "sentence-t5-large", + "AlloProfClusteringP2P": 61.82, + "AlloProfClusteringS2S": 39.78, + "ArxivClusteringP2P": 41.62, + "ArxivClusteringS2S": 29.44, + "BiorxivClusteringP2P": 35.99, + "BiorxivClusteringS2S": 24.02, + "BlurbsClusteringP2P": 35.33, + "BlurbsClusteringS2S": 13.27, + "HALClusteringS2S": 18.73, + "MLSUMClusteringP2P": 42.07, + "MLSUMClusteringS2S": 31.87, + "MasakhaNEWSClusteringP2P (fra)": 58.6, + "MasakhaNEWSClusteringS2S (fra)": 31.33, + "MedrxivClusteringP2P": 32.4, + "MedrxivClusteringS2S": 26.33, + "RedditClustering": 54.53, + "RedditClusteringP2P": 62.5, + "StackExchangeClustering": 65.11, + "StackExchangeClusteringP2P": 36.86, + "TenKGnadClusteringP2P": 44.11, + "TenKGnadClusteringS2S": 17.26, + "TwentyNewsgroupsClustering": 49.33 } ] }, "PairClassification": { "ap": [ { - "Model": "USER-base", - "OpusparcusPC (rus-Cyrl)": 91.65, - "TERRa (rus-Cyrl)": 60.02 + "Model": "sentence-t5-large", + "OpusparcusPC (fr)": 91.19, + "PawsXPairClassification (fr)": 59.59, + "SprintDuplicateQuestions": 89.01, + "TwitterSemEval2015": 79.75, + "TwitterURLCorpus": 86.14 } ] }, "Reranking": { "map": [ { - "Model": "USER-base", - "RuBQReranking (rus-Cyrl)": 64.42 + "Model": "sentence-t5-large", + "AlloprofReranking": 57.99, + "AskUbuntuDupQuestions": 61.51, + "MindSmallReranking": 30.27, + "SciDocsRR": 74.88, + "StackOverflowDupQuestions": 49.34, + "SyntecReranking": 79.77 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "USER-base", - "RiaNewsRetrieval (rus-Cyrl)": 77.83, - "RuBQRetrieval (rus-Cyrl)": 56.86 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "USER-base", - "RUParaPhraserSTS (rus-Cyrl)": 73.56, - "RuSTSBenchmarkSTS (rus-Cyrl)": 82.26, - "STS22 (rus-Cyrl)": 63.39, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81 - } - ] - }, - "Summarization": { + "Model": "sentence-t5-large", + "AlloprofRetrieval": 34.52, + "ArguAna": 39.27, + "BSARDRetrieval": 0.0, + "CQADupstackRetrieval": 38.96, + "ClimateFEVER": 11.36, + "DBPedia": 31.55, + "FEVER": 36.21, + "FiQA2018": 43.55, + "HotpotQA": 33.95, + "MSMARCO": 23.96, + "MintakaRetrieval (fr)": 23.92, + "NFCorpus": 31.1, + "NQ": 42.02, + "QuoraRetrieval": 85.73, + "SCIDOCS": 15.38, + "SciFact": 49.91, + "SyntecRetrieval": 71.05, + "TRECCOVID": 46.11, + "Touche2020": 21.63, + "XPQARetrieval (fr)": 48.79 + } + ] + }, + "STS": { "spearman": [ { - "Model": "USER-base" + "Model": "sentence-t5-large", + "BIOSSES": 78.93, + "SICK-R": 80.34, + "SICKFr": 72.83, + "STS12": 79.11, + "STS13": 87.33, + "STS14": 83.17, + "STS15": 88.28, + "STS16": 84.36, + "STS17 (ar-ar)": 10.75, + "STS17 (en-ar)": -4.71, + "STS17 (en-de)": 73.62, + "STS17 (en-en)": 88.99, + "STS17 (en-tr)": -0.42, + "STS17 (es-en)": 62.62, + "STS17 (es-es)": 82.74, + "STS17 (fr-en)": 67.86, + "STS17 (it-en)": 51.86, + "STS17 (ko-ko)": 9.44, + "STS17 (nl-en)": 45.95, + "STS22 (ar)": 27.01, + "STS22 (de)": 43.73, + "STS22 (de-en)": 49.93, + "STS22 (de-fr)": 61.58, + "STS22 (de-pl)": 38.83, + "STS22 (en)": 62.39, + "STS22 (es)": 57.68, + "STS22 (es-en)": 68.09, + "STS22 (es-it)": 61.58, + "STS22 (fr)": 75.01, + "STS22 (fr-pl)": 5.63, + "STS22 (it)": 62.01, + "STS22 (pl)": 25.0, + 
"STS22 (pl-en)": 51.72, + "STS22 (ru)": 14.21, + "STS22 (tr)": 47.3, + "STS22 (zh)": 30.47, + "STS22 (zh-en)": 23.1, + "STSBenchmark": 85.36, + "STSBenchmarkMultilingualSTS (fr)": 77.59 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "sentence-t5-large", + "SummEval": 29.64, + "SummEvalFr": 30.23 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "USER-base" + "Model": "sentence-t5-large" } ] } }, - "sentence-t5-base": { + "text-similarity-curie-001": { "BitextMining": { "f1": [ { - "Model": "sentence-t5-base" + "Model": "text-similarity-curie-001" } ] }, "Classification": { "accuracy": [ { - "Model": "sentence-t5-base", - "AmazonCounterfactualClassification (de)": 69.98, - "AmazonCounterfactualClassification (en)": 75.82, - "AmazonCounterfactualClassification (en-ext)": 76.81, - "AmazonCounterfactualClassification (ja)": 46.05, - "AmazonPolarityClassification": 85.12, - "AmazonReviewsClassification (de)": 37.9, - "AmazonReviewsClassification (en)": 44.94, - "AmazonReviewsClassification (es)": 37.33, - "AmazonReviewsClassification (fr)": 37.35, - "AmazonReviewsClassification (ja)": 22.29, - "AmazonReviewsClassification (zh)": 21.53, - "Banking77Classification": 76.48, - "EmotionClassification": 51.35, - "ImdbClassification": 77.34, - "MTOPDomainClassification (de)": 76.98, - "MTOPDomainClassification (en)": 90.34, - "MTOPDomainClassification (es)": 73.61, - "MTOPDomainClassification (fr)": 75.03, - "MTOPDomainClassification (hi)": 21.4, - "MTOPDomainClassification (th)": 16.21, - "MTOPIntentClassification (de)": 44.43, - "MTOPIntentClassification (en)": 63.32, - "MTOPIntentClassification (es)": 42.03, - "MTOPIntentClassification (fr)": 43.85, - "MTOPIntentClassification (hi)": 3.8, - "MTOPIntentClassification (th)": 5.21, - "MasakhaNEWSClassification (fra)": 81.21, - "MassiveIntentClassification (af)": 34.32, - "MassiveIntentClassification (am)": 2.38, - "MassiveIntentClassification (ar)": 4.53, - "MassiveIntentClassification (az)": 31.76, - "MassiveIntentClassification (bn)": 2.58, - "MassiveIntentClassification (cy)": 28.94, - "MassiveIntentClassification (da)": 38.82, - "MassiveIntentClassification (de)": 45.23, - "MassiveIntentClassification (el)": 10.05, - "MassiveIntentClassification (en)": 69.74, - "MassiveIntentClassification (es)": 45.32, - "MassiveIntentClassification (fa)": 3.58, - "MassiveIntentClassification (fi)": 33.52, - "MassiveIntentClassification (fr)": 51.13, - "MassiveIntentClassification (he)": 2.63, - "MassiveIntentClassification (hi)": 2.68, - "MassiveIntentClassification (hu)": 32.31, - "MassiveIntentClassification (hy)": 3.33, - "MassiveIntentClassification (id)": 35.5, - "MassiveIntentClassification (is)": 29.82, - "MassiveIntentClassification (it)": 45.59, - "MassiveIntentClassification (ja)": 3.67, - "MassiveIntentClassification (jv)": 31.15, - "MassiveIntentClassification (ka)": 2.77, - "MassiveIntentClassification (km)": 5.66, - "MassiveIntentClassification (kn)": 2.59, - "MassiveIntentClassification (ko)": 2.34, - "MassiveIntentClassification (lv)": 33.97, - "MassiveIntentClassification (ml)": 2.55, - "MassiveIntentClassification (mn)": 14.7, - "MassiveIntentClassification (ms)": 33.12, - "MassiveIntentClassification (my)": 4.42, - "MassiveIntentClassification (nb)": 38.53, - "MassiveIntentClassification (nl)": 37.96, - "MassiveIntentClassification (pl)": 34.41, - "MassiveIntentClassification (pt)": 43.35, - "MassiveIntentClassification (ro)": 42.69, - "MassiveIntentClassification (ru)": 14.82, - "MassiveIntentClassification (sl)": 
34.54, - "MassiveIntentClassification (sq)": 38.54, - "MassiveIntentClassification (sv)": 35.98, - "MassiveIntentClassification (sw)": 32.14, - "MassiveIntentClassification (ta)": 1.41, - "MassiveIntentClassification (te)": 2.5, - "MassiveIntentClassification (th)": 3.71, - "MassiveIntentClassification (tl)": 36.04, - "MassiveIntentClassification (tr)": 33.77, - "MassiveIntentClassification (ur)": 2.99, - "MassiveIntentClassification (vi)": 22.62, - "MassiveIntentClassification (zh-CN)": 1.12, - "MassiveIntentClassification (zh-TW)": 4.63, - "MassiveScenarioClassification (af)": 44.45, - "MassiveScenarioClassification (am)": 7.51, - "MassiveScenarioClassification (ar)": 12.32, - "MassiveScenarioClassification (az)": 38.41, - "MassiveScenarioClassification (bn)": 8.45, - "MassiveScenarioClassification (cy)": 35.04, - "MassiveScenarioClassification (da)": 48.36, - "MassiveScenarioClassification (de)": 59.12, - "MassiveScenarioClassification (el)": 17.68, - "MassiveScenarioClassification (en)": 72.32, - "MassiveScenarioClassification (es)": 55.61, - "MassiveScenarioClassification (fa)": 6.86, - "MassiveScenarioClassification (fi)": 41.34, - "MassiveScenarioClassification (fr)": 59.92, - "MassiveScenarioClassification (he)": 7.86, - "MassiveScenarioClassification (hi)": 7.63, - "MassiveScenarioClassification (hu)": 41.31, - "MassiveScenarioClassification (hy)": 9.23, - "MassiveScenarioClassification (id)": 44.64, - "MassiveScenarioClassification (is)": 39.63, - "MassiveScenarioClassification (it)": 54.58, - "MassiveScenarioClassification (ja)": 4.96, - "MassiveScenarioClassification (jv)": 40.73, - "MassiveScenarioClassification (ka)": 7.51, - "MassiveScenarioClassification (km)": 8.73, - "MassiveScenarioClassification (kn)": 7.99, - "MassiveScenarioClassification (ko)": 6.03, - "MassiveScenarioClassification (lv)": 36.42, - "MassiveScenarioClassification (ml)": 6.96, - "MassiveScenarioClassification (mn)": 19.85, - "MassiveScenarioClassification (ms)": 43.18, - "MassiveScenarioClassification (my)": 9.46, - "MassiveScenarioClassification (nb)": 46.6, - "MassiveScenarioClassification (nl)": 50.0, - "MassiveScenarioClassification (pl)": 42.3, - "MassiveScenarioClassification (pt)": 52.24, - "MassiveScenarioClassification (ro)": 53.7, - "MassiveScenarioClassification (ru)": 20.69, - "MassiveScenarioClassification (sl)": 39.79, - "MassiveScenarioClassification (sq)": 50.16, - "MassiveScenarioClassification (sv)": 46.69, - "MassiveScenarioClassification (sw)": 40.48, - "MassiveScenarioClassification (ta)": 7.47, - "MassiveScenarioClassification (te)": 6.87, - "MassiveScenarioClassification (th)": 8.26, - "MassiveScenarioClassification (tl)": 48.94, - "MassiveScenarioClassification (tr)": 41.83, - "MassiveScenarioClassification (ur)": 9.77, - "MassiveScenarioClassification (vi)": 30.01, - "MassiveScenarioClassification (zh-CN)": 4.17, - "MassiveScenarioClassification (zh-TW)": 7.91, - "ToxicConversationsClassification": 68.2, - "TweetSentimentExtractionClassification": 62.71 + "Model": "text-similarity-curie-001" } ] }, "Clustering": { "v_measure": [ { - "Model": "sentence-t5-base", - "AlloProfClusteringP2P": 58.44, - "AlloProfClusteringS2S": 35.93, - "ArxivClusteringP2P": 39.28, - "ArxivClusteringS2S": 27.26, - "BiorxivClusteringP2P": 33.99, - "BiorxivClusteringS2S": 22.92, - "BlurbsClusteringP2P": 30.59, - "BlurbsClusteringS2S": 11.57, - "HALClusteringS2S": 17.72, - "MLSUMClusteringP2P": 40.77, - "MLSUMClusteringS2S": 30.06, - "MasakhaNEWSClusteringP2P (fra)": 61.9, - "MasakhaNEWSClusteringS2S 
(fra)": 35.64, - "MedrxivClusteringP2P": 33.2, - "MedrxivClusteringS2S": 26.13, - "RedditClustering": 52.93, - "RedditClusteringP2P": 59.67, - "StackExchangeClustering": 63.13, - "StackExchangeClusteringP2P": 35.68, - "TenKGnadClusteringP2P": 44.88, - "TenKGnadClusteringS2S": 18.11, - "TwentyNewsgroupsClustering": 48.1 + "Model": "text-similarity-curie-001", + "RedditClustering": 40.79, + "StackExchangeClustering": 55.14, + "TwentyNewsgroupsClustering": 37.64 } ] }, "PairClassification": { "ap": [ { - "Model": "sentence-t5-base", - "OpusparcusPC (fr)": 89.4, - "PawsXPairClassification (fr)": 55.35, - "SprintDuplicateQuestions": 91.23, - "TwitterSemEval2015": 78.25, - "TwitterURLCorpus": 86.05 + "Model": "text-similarity-curie-001", + "SprintDuplicateQuestions": 79.85, + "TwitterSemEval2015": 69.45, + "TwitterURLCorpus": 84.06 } ] }, "Reranking": { "map": [ { - "Model": "sentence-t5-base", - "AlloprofReranking": 50.12, - "AskUbuntuDupQuestions": 59.73, - "MindSmallReranking": 30.2, - "SciDocsRR": 73.96, - "StackOverflowDupQuestions": 48.46, - "SyntecReranking": 78.05 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "sentence-t5-base", - "AlloprofRetrieval": 27.52, - "ArguAna": 44.85, - "BSARDRetrieval": 0.16, - "CQADupstackRetrieval": 35.23, - "ClimateFEVER": 10.37, - "DBPedia": 27.77, - "FEVER": 26.17, - "FiQA2018": 34.83, - "HotpotQA": 33.2, - "MSMARCO": 20.7, - "MintakaRetrieval (fr)": 21.04, - "NFCorpus": 28.65, - "NQ": 36.32, - "QuoraRetrieval": 85.49, - "SCIDOCS": 14.15, - "SciFact": 45.76, - "SyntecRetrieval": 67.0, - "TRECCOVID": 40.7, - "Touche2020": 20.3, - "XPQARetrieval (fr)": 45.19 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "sentence-t5-base", - "BIOSSES": 75.89, - "SICK-R": 80.18, - "SICKFr": 71.74, - "STS12": 78.05, - "STS13": 85.85, - "STS14": 82.19, - "STS15": 87.46, - "STS16": 84.03, - "STS17 (ar-ar)": 13.36, - "STS17 (en-ar)": -5.65, - "STS17 (en-de)": 67.11, - "STS17 (en-en)": 89.57, - "STS17 (en-tr)": -0.02, - "STS17 (es-en)": 47.72, - "STS17 (es-es)": 79.94, - "STS17 (fr-en)": 56.61, - "STS17 (it-en)": 30.46, - "STS17 (ko-ko)": 10.06, - "STS17 (nl-en)": 36.46, - "STS22 (ar)": 31.2, - "STS22 (de)": 42.08, - "STS22 (de-en)": 46.9, - "STS22 (de-fr)": 55.04, - "STS22 (de-pl)": 33.94, - "STS22 (en)": 62.66, - "STS22 (es)": 53.81, - "STS22 (es-en)": 65.19, - "STS22 (es-it)": 55.29, - "STS22 (fr)": 77.69, - "STS22 (fr-pl)": 28.17, - "STS22 (it)": 60.65, - "STS22 (pl)": 24.42, - "STS22 (pl-en)": 42.97, - "STS22 (ru)": 12.13, - "STS22 (tr)": 40.45, - "STS22 (zh)": 32.9, - "STS22 (zh-en)": 20.15, - "STSBenchmark": 85.52, - "STSBenchmarkMultilingualSTS (fr)": 74.04 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "sentence-t5-base", - "SummEval": 31.39, - "SummEvalFr": 30.01 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "sentence-t5-base" - } - ] - } - }, - "st-polish-paraphrase-from-distilroberta": { - "BitextMining": { - "f1": [ - { - "Model": "st-polish-paraphrase-from-distilroberta" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "st-polish-paraphrase-from-distilroberta", - "AllegroReviews": 34.5, - "CBD": 70.27, - "MassiveIntentClassification (pl)": 64.81, - "MassiveScenarioClassification (pl)": 70.01, - "PAC": 64.6, - "PolEmo2.0-IN": 67.06, - "PolEmo2.0-OUT": 38.58 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "st-polish-paraphrase-from-distilroberta", - "8TagsClustering": 31.68 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": 
"st-polish-paraphrase-from-distilroberta", - "CDSC-E": 75.99, - "PPC": 93.29, - "PSC": 99.1, - "SICK-E-PL": 79.63 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "st-polish-paraphrase-from-distilroberta" + "Model": "text-similarity-curie-001", + "AskUbuntuDupQuestions": 55.09, + "SciDocsRR": 70.93, + "StackOverflowDupQuestions": 42.42 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "st-polish-paraphrase-from-distilroberta", - "ArguAna-PL": 49.42, - "DBPedia-PL": 19.82, - "FiQA-PL": 19.58, - "HotpotQA-PL": 23.47, - "MSMARCO-PL": 16.51, - "NFCorpus-PL": 22.49, - "NQ-PL": 19.83, - "Quora-PL": 81.17, - "SCIDOCS-PL": 12.15, - "SciFact-PL": 49.49, - "TRECCOVID-PL": 38.97 + "Model": "text-similarity-curie-001", + "FiQA2018": 5.14, + "NFCorpus": 19.96, + "QuoraRetrieval": 83.11, + "SciFact": 46.68, + "TRECCOVID": 7.61 } ] }, "STS": { "spearman": [ { - "Model": "st-polish-paraphrase-from-distilroberta", - "CDSC-R": 89.62, - "SICK-R-PL": 76.37, - "STS22 (pl)": 40.36 + "Model": "text-similarity-curie-001", + "BIOSSES": 77.46, + "SICK-R": 77.26, + "STSBenchmark": 83.02 } ] }, "Summarization": { "spearman": [ { - "Model": "st-polish-paraphrase-from-distilroberta" + "Model": "text-similarity-curie-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "st-polish-paraphrase-from-distilroberta" + "Model": "text-similarity-curie-001" } ] } @@ -5251,14877 +3745,4240 @@ ] } }, - "text2vec-large-chinese": { + "komninos": { "BitextMining": { "f1": [ { - "Model": "text2vec-large-chinese" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text2vec-large-chinese", - "AmazonReviewsClassification (zh)": 33.77, - "IFlyTek": 41.54, - "JDReview": 81.56, - "MassiveIntentClassification (zh-CN)": 63.23, - "MassiveScenarioClassification (zh-CN)": 68.45, - "MultilingualSentiment": 58.97, - "OnlineShopping": 83.51, - "TNews": 38.92, - "Waimai": 76.01 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text2vec-large-chinese", - "CLSClusteringP2P": 30.13, - "CLSClusteringS2S": 28.77, - "ThuNewsClusteringP2P": 35.05, - "ThuNewsClusteringS2S": 26.14 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text2vec-large-chinese", - "Cmnli": 77.67, - "Ocnli": 64.04 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text2vec-large-chinese", - "CMedQAv1": 58.92, - "CMedQAv2": 60.41, - "MMarcoReranking": 12.48, - "T2Reranking": 64.82 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text2vec-large-chinese", - "CmedqaRetrieval": 15.53, - "CovidRetrieval": 60.48, - "DuRetrieval": 51.87, - "EcomRetrieval": 37.58, - "MMarcoRetrieval": 45.96, - "MedicalRetrieval": 30.93, - "T2Retrieval": 50.52, - "VideoRetrieval": 42.65 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text2vec-large-chinese", - "AFQMC": 24.51, - "ATEC": 32.45, - "BQ": 44.22, - "LCQMC": 69.16, - "PAWSX": 14.55, - "QBQTC": 29.51, - "STS22 (zh)": 65.94, - "STSB": 79.45 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text2vec-large-chinese" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text2vec-large-chinese" - } - ] - } - }, - "herbert-base-retrieval-v2": { - "BitextMining": { - "f1": [ - { - "Model": "herbert-base-retrieval-v2" + "Model": "komninos", + "BUCC (de-en)": 0.18, + "BUCC (fr-en)": 0.08, + "BUCC (ru-en)": 0.15, + "BUCC (zh-en)": 0.05, + "Tatoeba (afr-eng)": 4.82, + "Tatoeba (amh-eng)": 1.18, + "Tatoeba (ang-eng)": 8.54, + "Tatoeba (ara-eng)": 0.63, + "Tatoeba (arq-eng)": 0.4, + "Tatoeba (arz-eng)": 0.63, + "Tatoeba (ast-eng)": 11.69, + 
"Tatoeba (awa-eng)": 0.0, + "Tatoeba (aze-eng)": 3.22, + "Tatoeba (bel-eng)": 1.75, + "Tatoeba (ben-eng)": 0.2, + "Tatoeba (ber-eng)": 7.0, + "Tatoeba (bos-eng)": 9.31, + "Tatoeba (bre-eng)": 4.17, + "Tatoeba (bul-eng)": 1.29, + "Tatoeba (cat-eng)": 7.73, + "Tatoeba (cbk-eng)": 5.61, + "Tatoeba (ceb-eng)": 4.88, + "Tatoeba (ces-eng)": 3.55, + "Tatoeba (cha-eng)": 19.29, + "Tatoeba (cmn-eng)": 0.5, + "Tatoeba (cor-eng)": 4.15, + "Tatoeba (csb-eng)": 5.69, + "Tatoeba (cym-eng)": 8.4, + "Tatoeba (dan-eng)": 6.99, + "Tatoeba (deu-eng)": 3.67, + "Tatoeba (dsb-eng)": 5.33, + "Tatoeba (dtp-eng)": 4.25, + "Tatoeba (ell-eng)": 0.63, + "Tatoeba (epo-eng)": 2.45, + "Tatoeba (est-eng)": 2.69, + "Tatoeba (eus-eng)": 4.69, + "Tatoeba (fao-eng)": 7.61, + "Tatoeba (fin-eng)": 3.36, + "Tatoeba (fra-eng)": 7.0, + "Tatoeba (fry-eng)": 12.36, + "Tatoeba (gla-eng)": 3.07, + "Tatoeba (gle-eng)": 4.81, + "Tatoeba (glg-eng)": 8.12, + "Tatoeba (gsw-eng)": 18.87, + "Tatoeba (heb-eng)": 0.68, + "Tatoeba (hin-eng)": 0.1, + "Tatoeba (hrv-eng)": 5.41, + "Tatoeba (hsb-eng)": 6.32, + "Tatoeba (hun-eng)": 3.42, + "Tatoeba (hye-eng)": 0.97, + "Tatoeba (ido-eng)": 7.1, + "Tatoeba (ile-eng)": 13.61, + "Tatoeba (ina-eng)": 8.57, + "Tatoeba (ind-eng)": 7.26, + "Tatoeba (isl-eng)": 4.09, + "Tatoeba (ita-eng)": 5.54, + "Tatoeba (jav-eng)": 11.43, + "Tatoeba (jpn-eng)": 0.2, + "Tatoeba (kab-eng)": 2.71, + "Tatoeba (kat-eng)": 1.11, + "Tatoeba (kaz-eng)": 1.17, + "Tatoeba (khm-eng)": 0.55, + "Tatoeba (kor-eng)": 0.5, + "Tatoeba (kur-eng)": 8.55, + "Tatoeba (kzj-eng)": 4.61, + "Tatoeba (lat-eng)": 4.07, + "Tatoeba (lfn-eng)": 2.83, + "Tatoeba (lit-eng)": 0.95, + "Tatoeba (lvs-eng)": 3.25, + "Tatoeba (mal-eng)": 0.29, + "Tatoeba (mar-eng)": 0.2, + "Tatoeba (max-eng)": 14.53, + "Tatoeba (mhr-eng)": 0.2, + "Tatoeba (mkd-eng)": 0.2, + "Tatoeba (mon-eng)": 1.1, + "Tatoeba (nds-eng)": 10.37, + "Tatoeba (nld-eng)": 9.5, + "Tatoeba (nno-eng)": 4.49, + "Tatoeba (nob-eng)": 4.95, + "Tatoeba (nov-eng)": 14.53, + "Tatoeba (oci-eng)": 5.8, + "Tatoeba (orv-eng)": 0.24, + "Tatoeba (pam-eng)": 6.65, + "Tatoeba (pes-eng)": 0.5, + "Tatoeba (pms-eng)": 8.05, + "Tatoeba (pol-eng)": 5.13, + "Tatoeba (por-eng)": 5.87, + "Tatoeba (ron-eng)": 6.76, + "Tatoeba (rus-eng)": 0.2, + "Tatoeba (slk-eng)": 4.23, + "Tatoeba (slv-eng)": 6.05, + "Tatoeba (spa-eng)": 5.03, + "Tatoeba (sqi-eng)": 4.36, + "Tatoeba (srp-eng)": 1.77, + "Tatoeba (swe-eng)": 6.72, + "Tatoeba (swg-eng)": 8.54, + "Tatoeba (swh-eng)": 11.49, + "Tatoeba (tam-eng)": 1.3, + "Tatoeba (tat-eng)": 0.77, + "Tatoeba (tel-eng)": 0.85, + "Tatoeba (tgl-eng)": 2.61, + "Tatoeba (tha-eng)": 0.69, + "Tatoeba (tuk-eng)": 5.76, + "Tatoeba (tur-eng)": 5.24, + "Tatoeba (tzl-eng)": 15.51, + "Tatoeba (uig-eng)": 0.6, + "Tatoeba (ukr-eng)": 1.23, + "Tatoeba (urd-eng)": 0.4, + "Tatoeba (uzb-eng)": 4.73, + "Tatoeba (vie-eng)": 6.55, + "Tatoeba (war-eng)": 4.12, + "Tatoeba (wuu-eng)": 0.2, + "Tatoeba (xho-eng)": 4.33, + "Tatoeba (yid-eng)": 0.59, + "Tatoeba (yue-eng)": 0.5, + "Tatoeba (zsm-eng)": 7.27 } ] }, "Classification": { "accuracy": [ { - "Model": "herbert-base-retrieval-v2", - "AllegroReviews": 34.11, - "CBD": 68.35, - "MassiveIntentClassification (pl)": 65.53, - "MassiveScenarioClassification (pl)": 68.51, - "PAC": 68.4, - "PolEmo2.0-IN": 64.18, - "PolEmo2.0-OUT": 45.73 + "Model": "komninos", + "AmazonCounterfactualClassification (en)": 60.54, + "AmazonPolarityClassification": 59.59, + "AmazonReviewsClassification (en)": 31.01, + "Banking77Classification": 67.05, + "EmotionClassification": 33.18, + 
"ImdbClassification": 63.98, + "MTOPDomainClassification (en)": 78.57, + "MTOPIntentClassification (en)": 57.07, + "MassiveIntentClassification (en)": 57.21, + "MassiveScenarioClassification (en)": 66.11, + "ToxicConversationsClassification": 67.76, + "TweetSentimentExtractionClassification": 49.68 } ] }, "Clustering": { "v_measure": [ { - "Model": "herbert-base-retrieval-v2", - "8TagsClustering": 28.15 + "Model": "komninos", + "ArxivClusteringP2P": 34.73, + "ArxivClusteringS2S": 26.01, + "BiorxivClusteringP2P": 29.76, + "BiorxivClusteringS2S": 20.71, + "BlurbsClusteringP2P": 11.37, + "BlurbsClusteringS2S": 8.01, + "MedrxivClusteringP2P": 26.65, + "MedrxivClusteringS2S": 21.5, + "RedditClustering": 28.84, + "RedditClusteringP2P": 7.37, + "StackExchangeClustering": 39.04, + "StackExchangeClusteringP2P": 30.23, + "TenKGnadClusteringP2P": 15.89, + "TenKGnadClusteringS2S": 4.84, + "TwentyNewsgroupsClustering": 27.42 } ] }, "PairClassification": { "ap": [ { - "Model": "herbert-base-retrieval-v2", - "CDSC-E": 63.31, - "PPC": 84.18, - "PSC": 98.87, - "SICK-E-PL": 54.93 + "Model": "komninos", + "SprintDuplicateQuestions": 85.55, + "TwitterSemEval2015": 53.85, + "TwitterURLCorpus": 79.41 } ] }, "Reranking": { "map": [ { - "Model": "herbert-base-retrieval-v2" + "Model": "komninos", + "AskUbuntuDupQuestions": 50.88, + "MindSmallReranking": 28.92, + "SciDocsRR": 63.55, + "StackOverflowDupQuestions": 35.65 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "herbert-base-retrieval-v2", - "ArguAna-PL": 41.97, - "DBPedia-PL": 24.07, - "FiQA-PL": 24.25, - "HotpotQA-PL": 43.41, - "MSMARCO-PL": 51.56, - "NFCorpus-PL": 25.95, - "NQ-PL": 35.09, - "Quora-PL": 78.86, - "SCIDOCS-PL": 11.0, - "SciFact-PL": 51.92, - "TRECCOVID-PL": 42.64 + "Model": "komninos", + "ArguAna": 30.96, + "CQADupstackRetrieval": 16.79, + "ClimateFEVER": 14.87, + "DBPedia": 15.88, + "FEVER": 15.56, + "FiQA2018": 10.49, + "HotpotQA": 20.77, + "MSMARCO": 9.75, + "NFCorpus": 11.79, + "NQ": 12.75, + "QuoraRetrieval": 71.57, + "SCIDOCS": 8.47, + "SciFact": 29.53, + "TRECCOVID": 35.92, + "Touche2020": 13.17 } ] }, "STS": { "spearman": [ { - "Model": "herbert-base-retrieval-v2", - "CDSC-R": 86.18, - "SICK-R-PL": 64.67, - "STS22 (pl)": 39.73 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "herbert-base-retrieval-v2" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "herbert-base-retrieval-v2" - } - ] - } - }, - "e5-base-v2": { - "BitextMining": { - "f1": [ - { - "Model": "e5-base-v2" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "e5-base-v2" - } - ] + "Model": "komninos", + "BIOSSES": 50.25, + "SICK-R": 55.49, + "STS12": 53.51, + "STS13": 70.8, + "STS14": 63.56, + "STS15": 74.08, + "STS16": 64.6, + "STS17 (ar-ar)": 13.78, + "STS17 (en-ar)": 9.08, + "STS17 (en-de)": -3.11, + "STS17 (en-en)": 76.91, + "STS17 (en-tr)": -0.45, + "STS17 (es-en)": -8.18, + "STS17 (es-es)": 48.23, + "STS17 (fr-en)": 5.81, + "STS17 (it-en)": 3.64, + "STS17 (ko-ko)": 2.54, + "STS17 (nl-en)": 0.44, + "STS22 (ar)": 32.42, + "STS22 (de)": 33.04, + "STS22 (de-en)": 28.65, + "STS22 (de-fr)": 14.77, + "STS22 (de-pl)": 11.21, + "STS22 (en)": 53.89, + "STS22 (es)": 48.53, + "STS22 (es-en)": 26.97, + "STS22 (es-it)": 41.1, + "STS22 (fr)": 49.43, + "STS22 (fr-pl)": 39.44, + "STS22 (it)": 57.77, + "STS22 (pl)": 12.47, + "STS22 (pl-en)": 45.55, + "STS22 (ru)": 19.44, + "STS22 (tr)": 47.38, + "STS22 (zh)": 4.78, + "STS22 (zh-en)": 14.05, + "STSBenchmark": 61.55 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": 
"komninos", + "SummEval": 30.49 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "komninos" + } + ] + } + }, + "llama-2-7b-chat": { + "BitextMining": { + "f1": [ + { + "Model": "llama-2-7b-chat" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "llama-2-7b-chat" + } + ] }, "Clustering": { "v_measure": [ { - "Model": "e5-base-v2", - "BiorxivClusteringP2P": 37.12, - "BiorxivClusteringS2S": 33.41, - "MedrxivClusteringP2P": 31.82, - "MedrxivClusteringS2S": 29.68, - "RedditClustering": 56.54, - "RedditClusteringP2P": 63.23, - "StackExchangeClustering": 64.6, - "StackExchangeClusteringP2P": 33.02, - "TwentyNewsgroupsClustering": 49.86 + "Model": "llama-2-7b-chat" } ] }, "PairClassification": { "ap": [ { - "Model": "e5-base-v2" + "Model": "llama-2-7b-chat" } ] }, "Reranking": { "map": [ { - "Model": "e5-base-v2" + "Model": "llama-2-7b-chat" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "e5-base-v2" + "Model": "llama-2-7b-chat" } ] }, "STS": { "spearman": [ { - "Model": "e5-base-v2" + "Model": "llama-2-7b-chat" } ] }, "Summarization": { "spearman": [ { - "Model": "e5-base-v2" + "Model": "llama-2-7b-chat" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "e5-base-v2", - "Core17InstructionRetrieval": -2.9, - "News21InstructionRetrieval": -2.0, - "Robust04InstructionRetrieval": -6.73 + "Model": "llama-2-7b-chat", + "Core17InstructionRetrieval": 2.84, + "News21InstructionRetrieval": 0.23, + "Robust04InstructionRetrieval": 2.0 } ] } }, - "LASER2": { + "gtr-t5-xxl": { "BitextMining": { "f1": [ { - "Model": "LASER2", - "BUCC (de-en)": 99.21, - "BUCC (fr-en)": 98.39, - "BUCC (ru-en)": 97.62, - "BUCC (zh-en)": 97.7, - "Tatoeba (afr-eng)": 92.59, - "Tatoeba (amh-eng)": 80.82, - "Tatoeba (ang-eng)": 25.22, - "Tatoeba (ara-eng)": 90.14, - "Tatoeba (arq-eng)": 26.63, - "Tatoeba (arz-eng)": 66.16, - "Tatoeba (ast-eng)": 76.35, - "Tatoeba (awa-eng)": 33.74, - "Tatoeba (aze-eng)": 82.41, - "Tatoeba (bel-eng)": 79.54, - "Tatoeba (ben-eng)": 89.43, - "Tatoeba (ber-eng)": 77.63, - "Tatoeba (bos-eng)": 95.86, - "Tatoeba (bre-eng)": 31.2, - "Tatoeba (bul-eng)": 93.57, - "Tatoeba (cat-eng)": 95.8, - "Tatoeba (cbk-eng)": 77.17, - "Tatoeba (ceb-eng)": 9.93, - "Tatoeba (ces-eng)": 95.52, - "Tatoeba (cha-eng)": 14.86, - "Tatoeba (cmn-eng)": 85.62, - "Tatoeba (cor-eng)": 4.45, - "Tatoeba (csb-eng)": 27.03, - "Tatoeba (cym-eng)": 5.85, - "Tatoeba (dan-eng)": 95.22, - "Tatoeba (deu-eng)": 99.07, - "Tatoeba (dsb-eng)": 42.34, - "Tatoeba (dtp-eng)": 7.39, - "Tatoeba (ell-eng)": 96.2, - "Tatoeba (epo-eng)": 96.61, - "Tatoeba (est-eng)": 96.43, - "Tatoeba (eus-eng)": 93.32, - "Tatoeba (fao-eng)": 57.04, - "Tatoeba (fin-eng)": 96.98, - "Tatoeba (fra-eng)": 94.28, - "Tatoeba (fry-eng)": 42.07, - "Tatoeba (gla-eng)": 1.52, - "Tatoeba (gle-eng)": 4.2, - "Tatoeba (glg-eng)": 96.14, - "Tatoeba (gsw-eng)": 27.52, - "Tatoeba (heb-eng)": 0.0, - "Tatoeba (hin-eng)": 95.32, - "Tatoeba (hrv-eng)": 96.72, - "Tatoeba (hsb-eng)": 45.75, - "Tatoeba (hun-eng)": 95.2, - "Tatoeba (hye-eng)": 88.72, - "Tatoeba (ido-eng)": 80.86, - "Tatoeba (ile-eng)": 87.88, - "Tatoeba (ina-eng)": 93.93, - "Tatoeba (ind-eng)": 92.98, - "Tatoeba (isl-eng)": 94.32, - "Tatoeba (ita-eng)": 94.32, - "Tatoeba (jav-eng)": 9.95, - "Tatoeba (jpn-eng)": 93.78, - "Tatoeba (kab-eng)": 65.88, - "Tatoeba (kat-eng)": 81.16, - "Tatoeba (kaz-eng)": 53.3, - "Tatoeba (khm-eng)": 74.19, - "Tatoeba (kor-eng)": 87.97, - "Tatoeba (kur-eng)": 19.09, - "Tatoeba (kzj-eng)": 4.46, - "Tatoeba (lat-eng)": 64.81, - "Tatoeba (lfn-eng)": 63.39, - 
"Tatoeba (lit-eng)": 96.2, - "Tatoeba (lvs-eng)": 95.33, - "Tatoeba (mal-eng)": 98.16, - "Tatoeba (mar-eng)": 92.93, - "Tatoeba (max-eng)": 36.96, - "Tatoeba (mhr-eng)": 6.86, - "Tatoeba (mkd-eng)": 93.63, - "Tatoeba (mon-eng)": 3.42, - "Tatoeba (nds-eng)": 77.13, - "Tatoeba (nld-eng)": 95.35, - "Tatoeba (nno-eng)": 72.75, - "Tatoeba (nob-eng)": 95.77, - "Tatoeba (nov-eng)": 60.02, - "Tatoeba (oci-eng)": 58.13, - "Tatoeba (orv-eng)": 23.24, - "Tatoeba (pam-eng)": 3.24, - "Tatoeba (pes-eng)": 93.13, - "Tatoeba (pms-eng)": 36.23, - "Tatoeba (pol-eng)": 97.32, - "Tatoeba (por-eng)": 94.54, - "Tatoeba (ron-eng)": 96.52, - "Tatoeba (rus-eng)": 92.58, - "Tatoeba (slk-eng)": 95.82, - "Tatoeba (slv-eng)": 95.4, - "Tatoeba (spa-eng)": 97.33, - "Tatoeba (sqi-eng)": 97.22, - "Tatoeba (srp-eng)": 93.64, - "Tatoeba (swe-eng)": 95.31, - "Tatoeba (swg-eng)": 33.1, - "Tatoeba (swh-eng)": 55.66, - "Tatoeba (tam-eng)": 87.32, - "Tatoeba (tat-eng)": 34.74, - "Tatoeba (tel-eng)": 96.72, - "Tatoeba (tgl-eng)": 63.19, - "Tatoeba (tha-eng)": 96.38, - "Tatoeba (tuk-eng)": 16.35, - "Tatoeba (tur-eng)": 98.03, - "Tatoeba (tzl-eng)": 36.56, - "Tatoeba (uig-eng)": 56.49, - "Tatoeba (ukr-eng)": 93.52, - "Tatoeba (urd-eng)": 84.23, - "Tatoeba (uzb-eng)": 23.2, - "Tatoeba (vie-eng)": 96.73, - "Tatoeba (war-eng)": 8.25, - "Tatoeba (wuu-eng)": 75.09, - "Tatoeba (xho-eng)": 4.68, - "Tatoeba (yid-eng)": 2.49, - "Tatoeba (yue-eng)": 87.75, - "Tatoeba (zsm-eng)": 95.41 + "Model": "gtr-t5-xxl" } ] }, "Classification": { "accuracy": [ { - "Model": "LASER2", - "AmazonCounterfactualClassification (de)": 67.82, - "AmazonCounterfactualClassification (en)": 76.84, - "AmazonCounterfactualClassification (en-ext)": 76.17, - "AmazonCounterfactualClassification (ja)": 68.76, - "AmazonPolarityClassification": 61.01, - "AmazonReviewsClassification (de)": 31.07, - "AmazonReviewsClassification (en)": 28.71, - "AmazonReviewsClassification (es)": 32.72, - "AmazonReviewsClassification (fr)": 31.12, - "AmazonReviewsClassification (ja)": 28.94, - "AmazonReviewsClassification (zh)": 30.89, - "Banking77Classification": 57.76, - "EmotionClassification": 24.83, - "ImdbClassification": 57.58, - "MTOPDomainClassification (de)": 74.08, - "MTOPDomainClassification (en)": 75.36, - "MTOPDomainClassification (es)": 73.47, - "MTOPDomainClassification (fr)": 72.26, - "MTOPDomainClassification (hi)": 72.95, - "MTOPDomainClassification (th)": 72.68, - "MTOPIntentClassification (de)": 51.62, - "MTOPIntentClassification (en)": 49.47, - "MTOPIntentClassification (es)": 52.75, - "MTOPIntentClassification (fr)": 50.12, - "MTOPIntentClassification (hi)": 45.55, - "MTOPIntentClassification (th)": 50.07, - "MasakhaNEWSClassification (fra)": 65.9, - "MassiveIntentClassification (af)": 38.01, - "MassiveIntentClassification (am)": 12.7, - "MassiveIntentClassification (ar)": 37.16, - "MassiveIntentClassification (az)": 19.98, - "MassiveIntentClassification (bn)": 42.51, - "MassiveIntentClassification (cy)": 17.33, - "MassiveIntentClassification (da)": 45.61, - "MassiveIntentClassification (de)": 44.79, - "MassiveIntentClassification (el)": 46.71, - "MassiveIntentClassification (en)": 47.91, - "MassiveIntentClassification (es)": 45.44, - "MassiveIntentClassification (fa)": 45.01, - "MassiveIntentClassification (fi)": 45.94, - "MassiveIntentClassification (fr)": 46.13, - "MassiveIntentClassification (he)": 42.55, - "MassiveIntentClassification (hi)": 40.2, - "MassiveIntentClassification (hu)": 42.77, - "MassiveIntentClassification (hy)": 28.07, - "MassiveIntentClassification 
(id)": 45.81, - "MassiveIntentClassification (is)": 39.86, - "MassiveIntentClassification (it)": 48.25, - "MassiveIntentClassification (ja)": 45.3, - "MassiveIntentClassification (jv)": 24.3, - "MassiveIntentClassification (ka)": 22.7, - "MassiveIntentClassification (km)": 22.48, - "MassiveIntentClassification (kn)": 4.32, - "MassiveIntentClassification (ko)": 44.26, - "MassiveIntentClassification (lv)": 39.75, - "MassiveIntentClassification (ml)": 41.33, - "MassiveIntentClassification (mn)": 16.2, - "MassiveIntentClassification (ms)": 43.23, - "MassiveIntentClassification (my)": 25.37, - "MassiveIntentClassification (nb)": 37.74, - "MassiveIntentClassification (nl)": 45.0, - "MassiveIntentClassification (pl)": 44.99, - "MassiveIntentClassification (pt)": 48.55, - "MassiveIntentClassification (ro)": 44.3, - "MassiveIntentClassification (ru)": 44.29, - "MassiveIntentClassification (sl)": 44.72, - "MassiveIntentClassification (sq)": 46.12, - "MassiveIntentClassification (sv)": 45.95, - "MassiveIntentClassification (sw)": 31.89, - "MassiveIntentClassification (ta)": 29.63, - "MassiveIntentClassification (te)": 36.03, - "MassiveIntentClassification (th)": 43.39, - "MassiveIntentClassification (tl)": 29.73, - "MassiveIntentClassification (tr)": 43.93, - "MassiveIntentClassification (ur)": 26.11, - "MassiveIntentClassification (vi)": 44.33, - "MassiveIntentClassification (zh-CN)": 40.62, - "MassiveIntentClassification (zh-TW)": 32.93, - "MassiveScenarioClassification (af)": 47.1, - "MassiveScenarioClassification (am)": 17.7, - "MassiveScenarioClassification (ar)": 45.21, - "MassiveScenarioClassification (az)": 28.21, - "MassiveScenarioClassification (bn)": 50.52, - "MassiveScenarioClassification (cy)": 22.58, - "MassiveScenarioClassification (da)": 54.87, - "MassiveScenarioClassification (de)": 54.34, - "MassiveScenarioClassification (el)": 55.47, - "MassiveScenarioClassification (en)": 55.92, - "MassiveScenarioClassification (es)": 52.77, - "MassiveScenarioClassification (fa)": 52.5, - "MassiveScenarioClassification (fi)": 52.63, - "MassiveScenarioClassification (fr)": 54.32, - "MassiveScenarioClassification (he)": 52.41, - "MassiveScenarioClassification (hi)": 47.37, - "MassiveScenarioClassification (hu)": 53.43, - "MassiveScenarioClassification (hy)": 33.57, - "MassiveScenarioClassification (id)": 54.38, - "MassiveScenarioClassification (is)": 49.78, - "MassiveScenarioClassification (it)": 54.84, - "MassiveScenarioClassification (ja)": 54.12, - "MassiveScenarioClassification (jv)": 32.71, - "MassiveScenarioClassification (ka)": 26.92, - "MassiveScenarioClassification (km)": 27.23, - "MassiveScenarioClassification (kn)": 10.06, - "MassiveScenarioClassification (ko)": 52.01, - "MassiveScenarioClassification (lv)": 44.82, - "MassiveScenarioClassification (ml)": 49.1, - "MassiveScenarioClassification (mn)": 21.51, - "MassiveScenarioClassification (ms)": 53.6, - "MassiveScenarioClassification (my)": 29.72, - "MassiveScenarioClassification (nb)": 43.9, - "MassiveScenarioClassification (nl)": 53.33, - "MassiveScenarioClassification (pl)": 52.92, - "MassiveScenarioClassification (pt)": 53.41, - "MassiveScenarioClassification (ro)": 50.48, - "MassiveScenarioClassification (ru)": 51.84, - "MassiveScenarioClassification (sl)": 51.29, - "MassiveScenarioClassification (sq)": 55.65, - "MassiveScenarioClassification (sv)": 54.64, - "MassiveScenarioClassification (sw)": 42.04, - "MassiveScenarioClassification (ta)": 36.72, - "MassiveScenarioClassification (te)": 42.08, - "MassiveScenarioClassification (th)": 
52.15, - "MassiveScenarioClassification (tl)": 37.34, - "MassiveScenarioClassification (tr)": 52.56, - "MassiveScenarioClassification (ur)": 32.6, - "MassiveScenarioClassification (vi)": 50.97, - "MassiveScenarioClassification (zh-CN)": 50.22, - "MassiveScenarioClassification (zh-TW)": 42.32, - "ToxicConversationsClassification": 54.05, - "TweetSentimentExtractionClassification": 48.73 + "Model": "gtr-t5-xxl", + "AmazonCounterfactualClassification (en)": 67.3, + "AmazonPolarityClassification": 75.05, + "AmazonReviewsClassification (en)": 37.3, + "Banking77Classification": 82.32, + "EmotionClassification": 43.19, + "ImdbClassification": 70.8, + "MTOPDomainClassification (en)": 93.84, + "MTOPIntentClassification (en)": 67.71, + "MassiveIntentClassification (en)": 70.61, + "MassiveScenarioClassification (en)": 77.77, + "ToxicConversationsClassification": 68.48, + "TweetSentimentExtractionClassification": 54.54 } ] }, "Clustering": { "v_measure": [ { - "Model": "LASER2", - "AlloProfClusteringP2P": 48.45, - "AlloProfClusteringS2S": 25.81, - "ArxivClusteringP2P": 17.77, - "ArxivClusteringS2S": 12.39, - "BiorxivClusteringP2P": 12.4, - "BiorxivClusteringS2S": 8.83, - "HALClusteringS2S": 11.52, - "MLSUMClusteringP2P": 34.53, - "MLSUMClusteringS2S": 27.35, - "MasakhaNEWSClusteringP2P (fra)": 32.04, - "MasakhaNEWSClusteringS2S (fra)": 29.77, - "MedrxivClusteringP2P": 17.91, - "MedrxivClusteringS2S": 16.63, - "RedditClustering": 9.96, - "RedditClusteringP2P": 26.42, - "StackExchangeClustering": 15.79, - "StackExchangeClusteringP2P": 18.63, - "TwentyNewsgroupsClustering": 11.38 + "Model": "gtr-t5-xxl", + "ArxivClusteringP2P": 37.9, + "ArxivClusteringS2S": 32.39, + "BiorxivClusteringP2P": 30.48, + "BiorxivClusteringS2S": 27.5, + "MedrxivClusteringP2P": 29.12, + "MedrxivClusteringS2S": 27.56, + "RedditClustering": 64.13, + "RedditClusteringP2P": 62.84, + "StackExchangeClustering": 71.43, + "StackExchangeClusteringP2P": 32.85, + "TwentyNewsgroupsClustering": 50.44 } ] }, "PairClassification": { "ap": [ { - "Model": "LASER2", - "OpusparcusPC (fr)": 93.77, - "PawsXPairClassification (fr)": 69.53, - "SprintDuplicateQuestions": 65.54, - "TwitterSemEval2015": 59.57, - "TwitterURLCorpus": 81.47 + "Model": "gtr-t5-xxl", + "SprintDuplicateQuestions": 95.68, + "TwitterSemEval2015": 77.54, + "TwitterURLCorpus": 85.13 } ] }, "Reranking": { "map": [ { - "Model": "LASER2", - "AlloprofReranking": 35.29, - "AskUbuntuDupQuestions": 48.99, - "MindSmallReranking": 24.79, - "SciDocsRR": 54.99, - "StackOverflowDupQuestions": 36.98, - "SyntecReranking": 55.93 + "Model": "gtr-t5-xxl", + "AskUbuntuDupQuestions": 63.23, + "MindSmallReranking": 31.93, + "SciDocsRR": 77.96, + "StackOverflowDupQuestions": 53.5 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "LASER2", - "AlloprofRetrieval": 3.1, - "ArguAna": 12.86, - "BSARDRetrieval": 0.36, - "CQADupstackRetrieval": 4.12, - "ClimateFEVER": 0.36, - "DBPedia": 1.53, - "FEVER": 0.77, - "FiQA2018": 1.73, - "HotpotQA": 5.5, - "MSMARCO": 1.09, - "MintakaRetrieval (fr)": 6.31, - "NFCorpus": 2.44, - "NQ": 0.64, - "QuoraRetrieval": 71.14, - "SCIDOCS": 0.78, - "SciFact": 4.04, - "SyntecRetrieval": 28.58, - "TRECCOVID": 10.97, - "Touche2020": 1.06, - "XPQARetrieval (fr)": 42.59 + "Model": "gtr-t5-xxl", + "ArguAna": 53.77, + "CQADupstackRetrieval": 38.56, + "ClimateFEVER": 27.21, + "DBPedia": 41.28, + "FEVER": 74.08, + "FiQA2018": 46.78, + "HotpotQA": 59.67, + "MSMARCO": 44.05, + "NFCorpus": 34.18, + "NQ": 57.24, + "QuoraRetrieval": 89.09, + "SCIDOCS": 15.88, + "SciFact": 66.77, + 
"TRECCOVID": 51.9, + "Touche2020": 26.76 } ] }, "STS": { "spearman": [ { - "Model": "LASER2", - "BIOSSES": 62.01, - "SICK-R": 62.86, - "SICKFr": 64.95, - "STS12": 62.6, - "STS13": 59.62, - "STS14": 57.03, - "STS15": 71.57, - "STS16": 70.75, - "STS17 (ar-ar)": 67.47, - "STS17 (en-ar)": 65.05, - "STS17 (en-de)": 66.66, - "STS17 (en-en)": 76.73, - "STS17 (en-tr)": 70.05, - "STS17 (es-en)": 55.3, - "STS17 (es-es)": 79.67, - "STS17 (fr-en)": 70.82, - "STS17 (it-en)": 70.98, - "STS17 (ko-ko)": 70.52, - "STS17 (nl-en)": 68.12, - "STS22 (ar)": 42.57, - "STS22 (de)": 25.69, - "STS22 (de-en)": 32.35, - "STS22 (de-fr)": 37.41, - "STS22 (de-pl)": 15.67, - "STS22 (en)": 39.76, - "STS22 (es)": 54.92, - "STS22 (es-en)": 54.34, - "STS22 (es-it)": 42.21, - "STS22 (fr)": 58.61, - "STS22 (fr-pl)": 39.44, - "STS22 (it)": 60.31, - "STS22 (pl)": 18.34, - "STS22 (pl-en)": 53.63, - "STS22 (ru)": 39.24, - "STS22 (tr)": 36.97, - "STS22 (zh)": 49.41, - "STS22 (zh-en)": 46.19, - "STSBenchmark": 69.77, - "STSBenchmarkMultilingualSTS (fr)": 69.82 + "Model": "gtr-t5-xxl", + "BIOSSES": 81.91, + "SICK-R": 74.29, + "STS12": 70.12, + "STS13": 82.72, + "STS14": 78.24, + "STS15": 86.26, + "STS16": 81.61, + "STS17 (en-en)": 85.18, + "STS22 (en)": 65.76, + "STSBenchmark": 77.73 } ] }, "Summarization": { "spearman": [ { - "Model": "LASER2", - "SummEval": 26.8, - "SummEvalFr": 31.56 + "Model": "gtr-t5-xxl", + "SummEval": 30.64 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "LASER2" + "Model": "gtr-t5-xxl" } ] } }, - "udever-bloom-1b1": { + "voyage-lite-02-instruct": { "BitextMining": { "f1": [ { - "Model": "udever-bloom-1b1" + "Model": "voyage-lite-02-instruct" } ] }, "Classification": { "accuracy": [ { - "Model": "udever-bloom-1b1", - "AmazonReviewsClassification (fr)": 35.12, - "MTOPDomainClassification (fr)": 69.24, - "MTOPIntentClassification (fr)": 51.25, - "MasakhaNEWSClassification (fra)": 80.83, - "MassiveIntentClassification (fr)": 43.21, - "MassiveScenarioClassification (fr)": 49.78 + "Model": "voyage-lite-02-instruct", + "AmazonCounterfactualClassification (en)": 88.31, + "AmazonPolarityClassification": 96.32, + "AmazonReviewsClassification (en)": 56.25, + "Banking77Classification": 88.59, + "EmotionClassification": 50.28, + "ImdbClassification": 95.75, + "MTOPDomainClassification (en)": 97.65, + "MTOPIntentClassification (en)": 75.16, + "MassiveIntentClassification (en)": 73.97, + "MassiveScenarioClassification (en)": 83.99, + "ToxicConversationsClassification": 81.75, + "TweetSentimentExtractionClassification": 62.98 } ] }, "Clustering": { "v_measure": [ { - "Model": "udever-bloom-1b1", - "AlloProfClusteringP2P": 62.22, - "AlloProfClusteringS2S": 27.06, - "HALClusteringS2S": 13.86, - "MLSUMClusteringP2P": 44.11, - "MLSUMClusteringS2S": 30.47, - "MasakhaNEWSClusteringP2P (fra)": 40.2, - "MasakhaNEWSClusteringS2S (fra)": 27.35 + "Model": "voyage-lite-02-instruct", + "ArxivClusteringP2P": 51.95, + "ArxivClusteringS2S": 42.48, + "BiorxivClusteringP2P": 50.15, + "BiorxivClusteringS2S": 42.84, + "MedrxivClusteringP2P": 47.24, + "MedrxivClusteringS2S": 43.48, + "RedditClustering": 63.73, + "RedditClusteringP2P": 64.09, + "StackExchangeClustering": 70.71, + "StackExchangeClusteringP2P": 40.34, + "TwentyNewsgroupsClustering": 59.56 } ] }, "PairClassification": { "ap": [ { - "Model": "udever-bloom-1b1", - "OpusparcusPC (fr)": 85.54, - "PawsXPairClassification (fr)": 61.99 + "Model": "voyage-lite-02-instruct", + "SprintDuplicateQuestions": 98.07, + "TwitterSemEval2015": 74.44, + "TwitterURLCorpus": 88.11 } ] }, 
"Reranking": { "map": [ { - "Model": "udever-bloom-1b1", - "AlloprofReranking": 39.13, - "SyntecReranking": 62.58 + "Model": "voyage-lite-02-instruct", + "AskUbuntuDupQuestions": 63.24, + "MindSmallReranking": 31.48, + "SciDocsRR": 84.68, + "StackOverflowDupQuestions": 53.56 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "udever-bloom-1b1", - "AlloprofRetrieval": 12.37, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 2.78, - "SyntecRetrieval": 40.57, - "XPQARetrieval (fr)": 33.82 + "Model": "voyage-lite-02-instruct", + "ArguAna": 70.28, + "CQADupstackRetrieval": 46.2, + "ClimateFEVER": 31.95, + "DBPedia": 39.79, + "FEVER": 91.35, + "FiQA2018": 52.51, + "HotpotQA": 75.51, + "MSMARCO": 37.93, + "NFCorpus": 43.7, + "NQ": 64.26, + "QuoraRetrieval": 87.62, + "SCIDOCS": 20.24, + "SciFact": 79.91, + "TRECCOVID": 81.02, + "Touche2020": 26.8 } ] }, "STS": { "spearman": [ { - "Model": "udever-bloom-1b1", - "SICKFr": 59.94, - "STS22 (fr)": 77.1, - "STSBenchmarkMultilingualSTS (fr)": 49.97 + "Model": "voyage-lite-02-instruct", + "BIOSSES": 89.7, + "SICK-R": 78.44, + "STS12": 86.46, + "STS13": 87.76, + "STS14": 86.6, + "STS15": 90.1, + "STS16": 86.39, + "STS17 (en-en)": 86.98, + "STS22 (en)": 76.89, + "STSBenchmark": 88.56 } ] }, "Summarization": { "spearman": [ { - "Model": "udever-bloom-1b1", - "SummEvalFr": 29.48 + "Model": "voyage-lite-02-instruct", + "SummEval": 31.01 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "udever-bloom-1b1" + "Model": "voyage-lite-02-instruct" } ] } }, - "deberta-v1-base": { + "monot5-base-msmarco-10k": { "BitextMining": { "f1": [ { - "Model": "deberta-v1-base", - "Tatoeba (rus-Cyrl_eng-Latn)": 13.21 + "Model": "monot5-base-msmarco-10k" } ] }, "Classification": { "accuracy": [ { - "Model": "deberta-v1-base", - "GeoreviewClassification (rus-Cyrl)": 40.19, - "HeadlineClassification (rus-Cyrl)": 78.75, - "InappropriatenessClassification (rus-Cyrl)": 61.33, - "KinopoiskClassification (rus-Cyrl)": 48.78, - "MassiveIntentClassification (rus-Cyrl)": 61.32, - "MassiveScenarioClassification (rus-Cyrl)": 64.71, - "RuReviewsClassification (rus-Cyrl)": 55.66, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.53, - "RuSciBenchOECDClassification (rus-Cyrl)": 41.34 + "Model": "monot5-base-msmarco-10k" } ] }, "Clustering": { "v_measure": [ { - "Model": "deberta-v1-base", - "GeoreviewClusteringP2P (rus-Cyrl)": 58.79, - "MLSUMClusteringP2P (rus-Cyrl)": 47.33, - "MLSUMClusteringS2S (rus-Cyrl)": 44.6, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 36.66, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 33.31 + "Model": "monot5-base-msmarco-10k" } ] }, "PairClassification": { "ap": [ { - "Model": "deberta-v1-base", - "OpusparcusPC (rus-Cyrl)": 83.31, - "TERRa (rus-Cyrl)": 53.78 + "Model": "monot5-base-msmarco-10k" } ] }, "Reranking": { "map": [ { - "Model": "deberta-v1-base", - "RuBQReranking (rus-Cyrl)": 34.01 + "Model": "monot5-base-msmarco-10k" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "deberta-v1-base", - "RiaNewsRetrieval (rus-Cyrl)": 4.84, - "RuBQRetrieval (rus-Cyrl)": 10.15 + "Model": "monot5-base-msmarco-10k" } ] }, "STS": { "spearman": [ { - "Model": "deberta-v1-base", - "RUParaPhraserSTS (rus-Cyrl)": 54.03, - "RuSTSBenchmarkSTS (rus-Cyrl)": 58.47, - "STS22 (rus-Cyrl)": 47.67, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 58.45 + "Model": "monot5-base-msmarco-10k" } ] }, "Summarization": { "spearman": [ { - "Model": "deberta-v1-base" + "Model": "monot5-base-msmarco-10k" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "deberta-v1-base" + "Model": 
"monot5-base-msmarco-10k", + "Core17InstructionRetrieval": -4.06, + "News21InstructionRetrieval": 5.02, + "Robust04InstructionRetrieval": -6.2 } ] } }, - "gte-Qwen2-7B-instruct": { + "FollowIR-7B": { "BitextMining": { "f1": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "Classification": { "accuracy": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "Clustering": { "v_measure": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "PairClassification": { "ap": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "Reranking": { "map": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "gte-Qwen2-7B-instruct", - "BrightRetrieval (earth_science)": 40.66, - "BrightRetrieval (sustainable_living)": 20.82, - "BrightRetrieval (theoremqa_theorems)": 28.15, - "BrightRetrieval (aops)": 15.1, - "BrightRetrieval (economics)": 16.18, - "BrightRetrieval (pony)": 1.25, - "BrightRetrieval (stackoverflow)": 13.95, - "BrightRetrieval (leetcode)": 31.07, - "BrightRetrieval (biology)": 32.09, - "BrightRetrieval (theoremqa_questions)": 29.9, - "BrightRetrieval (robotics)": 12.82, - "BrightRetrieval (psychology)": 26.58 + "Model": "FollowIR-7B" } ] }, "STS": { "spearman": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "Summarization": { "spearman": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "gte-Qwen2-7B-instruct" + "Model": "FollowIR-7B", + "Core17InstructionRetrieval": 16.48, + "News21InstructionRetrieval": 6.26, + "Robust04InstructionRetrieval": 13.72 } ] } }, - "contriever-base-msmarco": { + "LLM2Vec-Mistral-supervised": { "BitextMining": { "f1": [ { - "Model": "contriever-base-msmarco" + "Model": "LLM2Vec-Mistral-supervised" } ] }, "Classification": { "accuracy": [ { - "Model": "contriever-base-msmarco", - "AmazonCounterfactualClassification (en)": 72.19, - "AmazonPolarityClassification": 68.63, - "AmazonReviewsClassification (en)": 37.42, - "Banking77Classification": 80.02, - "EmotionClassification": 44.77, - "ImdbClassification": 67.04, - "MTOPDomainClassification (en)": 93.18, - "MTOPIntentClassification (en)": 69.31, - "MassiveIntentClassification (en)": 67.78, - "MassiveScenarioClassification (en)": 76.0, - "ToxicConversationsClassification": 67.77, - "TweetSentimentExtractionClassification": 56.1 + "Model": "LLM2Vec-Mistral-supervised", + "AmazonCounterfactualClassification (en)": 77.58, + "AmazonPolarityClassification": 91.12, + "AmazonReviewsClassification (en)": 49.97, + "Banking77Classification": 88.31, + "EmotionClassification": 52.04, + "ImdbClassification": 87.42, + "MTOPDomainClassification (en)": 96.04, + "MTOPIntentClassification (en)": 84.77, + "MassiveIntentClassification (en)": 79.29, + "MassiveScenarioClassification (en)": 81.64, + "ToxicConversationsClassification": 69.26, + "TweetSentimentExtractionClassification": 62.14 } ] }, "Clustering": { "v_measure": [ { - "Model": "contriever-base-msmarco", - "ArxivClusteringP2P": 42.61, - "ArxivClusteringS2S": 32.32, - "BiorxivClusteringP2P": 34.97, - "BiorxivClusteringS2S": 29.08, - "MedrxivClusteringP2P": 31.19, - "MedrxivClusteringS2S": 27.27, - "RedditClustering": 54.89, - "RedditClusteringP2P": 57.58, - "StackExchangeClustering": 63.15, - "StackExchangeClusteringP2P": 32.25, - "TwentyNewsgroupsClustering": 46.82 + "Model": "LLM2Vec-Mistral-supervised", + "ArxivClusteringP2P": 42.81, + 
"ArxivClusteringS2S": 44.24, + "BiorxivClusteringP2P": 34.27, + "BiorxivClusteringS2S": 35.53, + "MedrxivClusteringP2P": 31.07, + "MedrxivClusteringS2S": 31.27, + "RedditClustering": 60.24, + "RedditClusteringP2P": 64.12, + "StackExchangeClustering": 70.73, + "StackExchangeClusteringP2P": 34.5, + "TwentyNewsgroupsClustering": 52.18 } ] }, "PairClassification": { "ap": [ { - "Model": "contriever-base-msmarco", - "SprintDuplicateQuestions": 95.55, - "TwitterSemEval2015": 66.85, - "TwitterURLCorpus": 85.21 + "Model": "LLM2Vec-Mistral-supervised", + "SprintDuplicateQuestions": 96.82, + "TwitterSemEval2015": 80.6, + "TwitterURLCorpus": 86.56 } ] }, "Reranking": { "map": [ { - "Model": "contriever-base-msmarco", - "AskUbuntuDupQuestions": 56.69, - "MindSmallReranking": 31.58, - "SciDocsRR": 76.51, - "StackOverflowDupQuestions": 47.78 - } - ] - }, - "Retrieval": { + "Model": "LLM2Vec-Mistral-supervised", + "AskUbuntuDupQuestions": 63.98, + "MindSmallReranking": 31.5, + "SciDocsRR": 83.8, + "StackOverflowDupQuestions": 54.41 + } + ] + }, + "Retrieval": { "ndcg_at_10": [ { - "Model": "contriever-base-msmarco", - "ArguAna": 48.32, - "CQADupstackRetrieval": 33.67, - "ClimateFEVER": 24.79, - "DBPedia": 38.1, - "FEVER": 59.29, - "FiQA2018": 27.42, - "HotpotQA": 56.81, - "MSMARCO": 36.77, - "NFCorpus": 31.32, - "NQ": 41.83, - "QuoraRetrieval": 86.72, - "SCIDOCS": 17.12, - "SciFact": 65.51, - "TRECCOVID": 44.77, - "Touche2020": 15.79 + "Model": "LLM2Vec-Mistral-supervised", + "ArguAna": 57.48, + "CQADupstackRetrieval": 48.84, + "ClimateFEVER": 35.19, + "DBPedia": 49.58, + "FEVER": 89.4, + "FiQA2018": 53.11, + "HotpotQA": 74.07, + "MSMARCO": 42.17, + "NFCorpus": 39.33, + "NQ": 61.7, + "QuoraRetrieval": 87.75, + "SCIDOCS": 22.5, + "SciFact": 78.86, + "TRECCOVID": 77.69, + "Touche2020": 22.18 } ] }, "STS": { "spearman": [ { - "Model": "contriever-base-msmarco", - "BIOSSES": 83.32, - "SICK-R": 70.2, - "STS12": 64.34, - "STS13": 80.03, - "STS14": 74.51, - "STS15": 83.3, - "STS16": 79.67, - "STS17 (en-en)": 86.32, - "STS22 (en)": 64.64, - "STSBenchmark": 78.81 + "Model": "LLM2Vec-Mistral-supervised", + "BIOSSES": 85.24, + "SICK-R": 83.7, + "STS12": 78.8, + "STS13": 86.37, + "STS14": 84.04, + "STS15": 88.99, + "STS16": 87.22, + "STS17 (en-en)": 90.19, + "STS22 (en)": 67.68, + "STSBenchmark": 88.65 } ] }, "Summarization": { "spearman": [ { - "Model": "contriever-base-msmarco", - "SummEval": 30.36 + "Model": "LLM2Vec-Mistral-supervised", + "SummEval": 29.96 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "contriever-base-msmarco", - "Core17InstructionRetrieval": -2.48, - "News21InstructionRetrieval": -2.83, - "Robust04InstructionRetrieval": -6.12 + "Model": "LLM2Vec-Mistral-supervised" } ] } }, - "unsup-simcse-bert-base-uncased": { + "text-embedding-ada-002": { "BitextMining": { "f1": [ { - "Model": "unsup-simcse-bert-base-uncased" + "Model": "text-embedding-ada-002" } ] }, "Classification": { "accuracy": [ { - "Model": "unsup-simcse-bert-base-uncased", - "AmazonCounterfactualClassification (en)": 67.09, - "AmazonPolarityClassification": 74.48, - "AmazonReviewsClassification (en)": 33.85, - "Banking77Classification": 73.55, - "EmotionClassification": 42.22, - "ImdbClassification": 69.63, - "MTOPDomainClassification (en)": 81.71, - "MTOPIntentClassification (en)": 59.23, - "MassiveIntentClassification (en)": 59.84, - "MassiveScenarioClassification (en)": 66.25, - "ToxicConversationsClassification": 68.82, - "TweetSentimentExtractionClassification": 53.36 + "Model": "text-embedding-ada-002", + 
"AmazonCounterfactualClassification (en)": 75.94, + "AmazonPolarityClassification": 86.72, + "AmazonReviewsClassification (zh)": 38.3, + "AmazonReviewsClassification (en)": 44.78, + "AmazonReviewsClassification (fr)": 43.76, + "Banking77Classification": 80.66, + "EmotionClassification": 48.74, + "IFlyTek": 44.62, + "ImdbClassification": 77.98, + "JDReview": 74.6, + "MTOPDomainClassification (en)": 92.13, + "MTOPDomainClassification (fr)": 89.38, + "MTOPIntentClassification (en)": 64.68, + "MTOPIntentClassification (fr)": 64.45, + "MasakhaNEWSClassification (fra)": 81.52, + "MassiveIntentClassification (zh-CN)": 64.81, + "MassiveIntentClassification (en)": 70.15, + "MassiveIntentClassification (fr)": 65.42, + "MassiveScenarioClassification (zh-CN)": 71.4, + "MassiveScenarioClassification (en)": 75.33, + "MassiveScenarioClassification (fr)": 71.11, + "MultilingualSentiment": 67.99, + "OnlineShopping": 88.94, + "TNews": 45.77, + "ToxicConversationsClassification": 72.29, + "TweetSentimentExtractionClassification": 61.81, + "Waimai": 82.37 } ] }, "Clustering": { "v_measure": [ { - "Model": "unsup-simcse-bert-base-uncased", - "ArxivClusteringP2P": 32.61, - "ArxivClusteringS2S": 24.68, - "BiorxivClusteringP2P": 24.9, - "BiorxivClusteringS2S": 19.55, - "MedrxivClusteringP2P": 23.6, - "MedrxivClusteringS2S": 21.97, - "RedditClustering": 32.18, - "RedditClusteringP2P": 45.14, - "StackExchangeClustering": 43.07, - "StackExchangeClusteringP2P": 28.5, - "TwentyNewsgroupsClustering": 23.21 + "Model": "text-embedding-ada-002", + "AlloProfClusteringP2P": 64.83, + "AlloProfClusteringS2S": 53.52, + "ArxivClusteringP2P": 45.01, + "ArxivClusteringS2S": 36.85, + "BiorxivClusteringP2P": 36.66, + "BiorxivClusteringS2S": 34.21, + "CLSClusteringP2P": 38.26, + "CLSClusteringS2S": 35.91, + "HALClusteringS2S": 26.18, + "MLSUMClusteringP2P": 44.59, + "MLSUMClusteringS2S": 41.67, + "MasakhaNEWSClusteringP2P (fra)": 68.35, + "MasakhaNEWSClusteringS2S (fra)": 48.58, + "MedrxivClusteringP2P": 32.6, + "MedrxivClusteringS2S": 30.8, + "RedditClustering": 61.42, + "RedditClusteringP2P": 64.13, + "StackExchangeClustering": 72.22, + "StackExchangeClusteringP2P": 38.49, + "ThuNewsClusteringP2P": 58.71, + "ThuNewsClusteringS2S": 49.86, + "TwentyNewsgroupsClustering": 52.56 } ] }, "PairClassification": { "ap": [ { - "Model": "unsup-simcse-bert-base-uncased", - "SprintDuplicateQuestions": 69.41, - "TwitterSemEval2015": 60.21, - "TwitterURLCorpus": 81.37 + "Model": "text-embedding-ada-002", + "Cmnli": 76.03, + "Ocnli": 63.08, + "OpusparcusPC (fr)": 94.12, + "PawsXPairClassification (fr)": 60.16, + "SprintDuplicateQuestions": 92.17, + "TwitterSemEval2015": 75.28, + "TwitterURLCorpus": 87.22 } ] }, "Reranking": { "map": [ { - "Model": "unsup-simcse-bert-base-uncased", - "AskUbuntuDupQuestions": 51.57, - "MindSmallReranking": 28.62, - "SciDocsRR": 66.33, - "StackOverflowDupQuestions": 39.35 + "Model": "text-embedding-ada-002", + "AskUbuntuDupQuestions": 62.05, + "CMedQAv1": 63.08, + "CMedQAv2": 64.02, + "MMarcoReranking": 23.39, + "MindSmallReranking": 31.45, + "SciDocsRR": 81.22, + "StackOverflowDupQuestions": 50.54, + "SyntecReranking": 89.87, + "T2Reranking": 66.65 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "unsup-simcse-bert-base-uncased", - "ArguAna": 38.34, - "CQADupstackRetrieval": 13.22, - "ClimateFEVER": 11.8, - "DBPedia": 15.04, - "FEVER": 21.06, - "FiQA2018": 9.84, - "HotpotQA": 19.75, - "MSMARCO": 9.35, - "NFCorpus": 9.88, - "NQ": 11.69, - "QuoraRetrieval": 78.03, - "SCIDOCS": 5.5, - "SciFact": 25.72, - 
"TRECCOVID": 26.2, - "Touche2020": 8.9 + "Model": "text-embedding-ada-002", + "ARCChallenge": 13.3, + "AlloprofRetrieval": 51.64, + "AlphaNLI": 25.65, + "ArguAna": 57.44, + "BSARDRetrieval": 0.61, + "CQADupstackRetrieval": 41.69, + "ClimateFEVER": 21.64, + "CmedqaRetrieval": 22.36, + "CovidRetrieval": 57.21, + "DBPedia": 39.39, + "DuRetrieval": 71.17, + "EcomRetrieval": 44.49, + "FEVER": 74.99, + "FiQA2018": 44.41, + "HellaSwag": 29.29, + "HotpotQA": 60.9, + "MMarcoRetrieval": 69.86, + "MSMARCO": 40.91, + "MedicalRetrieval": 37.92, + "MintakaRetrieval (fr)": 29.94, + "NFCorpus": 36.97, + "NQ": 51.58, + "PIQA": 31.02, + "Quail": 5.83, + "QuoraRetrieval": 87.6, + "RARbCode": 83.39, + "RARbMath": 73.21, + "SCIDOCS": 18.36, + "SIQA": 3.14, + "SciFact": 72.75, + "SpartQA": 4.23, + "SyntecRetrieval": 85.97, + "T2Retrieval": 69.14, + "TRECCOVID": 68.47, + "TempReasonL1": 1.68, + "TempReasonL2Fact": 19.93, + "TempReasonL2Pure": 2.6, + "TempReasonL3Fact": 18.02, + "TempReasonL3Pure": 7.58, + "Touche2020": 21.61, + "VideoRetrieval": 43.85, + "WinoGrande": 19.65, + "XPQARetrieval (fr)": 73.0 } ] }, "STS": { "spearman": [ { - "Model": "unsup-simcse-bert-base-uncased", - "BIOSSES": 72.31, - "SICK-R": 72.24, - "STS12": 66.05, - "STS13": 81.49, - "STS14": 73.61, - "STS15": 79.72, - "STS16": 78.12, - "STS17 (en-en)": 83.58, - "STS22 (en)": 59.65, - "STSBenchmark": 76.52 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "unsup-simcse-bert-base-uncased", - "SummEval": 31.15 + "Model": "text-embedding-ada-002", + "AFQMC": 23.88, + "ATEC": 29.25, + "BIOSSES": 86.35, + "BQ": 45.33, + "LCQMC": 68.41, + "PAWSX": 16.55, + "QBQTC": 30.27, + "SICK-R": 80.6, + "SICKFr": 76.28, + "STS12": 69.8, + "STS13": 83.27, + "STS14": 76.09, + "STS15": 86.12, + "STS16": 85.96, + "STS17 (en-en)": 90.25, + "STS22 (zh)": 62.53, + "STS22 (en)": 68.12, + "STS22 (tr)": 64.5, + "STS22 (fr)": 81.09, + "STSB": 70.61, + "STSBenchmark": 83.17, + "STSBenchmarkMultilingualSTS (fr)": 77.55 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-embedding-ada-002", + "SummEval": 30.8, + "SummEvalFr": 30.5 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "unsup-simcse-bert-base-uncased" + "Model": "text-embedding-ada-002" } ] } }, - "SFR-Embedding-Mistral": { + "text-similarity-babbage-001": { "BitextMining": { "f1": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001" } ] }, "Classification": { "accuracy": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001" } ] }, "Clustering": { "v_measure": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001", + "RedditClustering": 45.64, + "StackExchangeClustering": 53.01, + "TwentyNewsgroupsClustering": 42.01 } ] }, "PairClassification": { "ap": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001", + "SprintDuplicateQuestions": 76.46, + "TwitterSemEval2015": 70.85, + "TwitterURLCorpus": 85.08 } ] }, "Reranking": { "map": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001", + "AskUbuntuDupQuestions": 54.68, + "SciDocsRR": 72.78, + "StackOverflowDupQuestions": 40.65 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "SFR-Embedding-Mistral", - "BrightRetrieval (sustainable_living)": 19.79, - "BrightRetrieval (economics)": 17.84, - "BrightRetrieval (theoremqa_theorems)": 24.05, - "BrightRetrieval (aops)": 7.43, - "BrightRetrieval (theoremqa_questions)": 23.05, - "BrightRetrieval (psychology)": 18.97, - "BrightRetrieval (stackoverflow)": 
12.72, - "BrightRetrieval (pony)": 1.97, - "BrightRetrieval (leetcode)": 27.35, - "BrightRetrieval (biology)": 19.49, - "BrightRetrieval (earth_science)": 26.63, - "BrightRetrieval (robotics)": 16.7 + "Model": "text-similarity-babbage-001" } ] }, "STS": { "spearman": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001", + "BIOSSES": 78.12, + "SICK-R": 77.02, + "STSBenchmark": 84.32 } ] }, "Summarization": { "spearman": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "SFR-Embedding-Mistral" + "Model": "text-similarity-babbage-001" } ] } }, - "text-embedding-3-small": { + "bert-base-25lang-cased": { "BitextMining": { "f1": [ { - "Model": "text-embedding-3-small" + "Model": "bert-base-25lang-cased" } ] }, "Classification": { "accuracy": [ { - "Model": "text-embedding-3-small", - "AmazonCounterfactualClassification (en)": 76.42, - "AmazonPolarityClassification": 90.84, - "AmazonReviewsClassification (en)": 45.73, - "Banking77Classification": 83.01, - "EmotionClassification": 50.63, - "ImdbClassification": 83.66, - "MTOPDomainClassification (en)": 93.91, - "MTOPIntentClassification (en)": 70.98, - "MassiveIntentClassification (en)": 72.86, - "MassiveScenarioClassification (en)": 76.84, - "ToxicConversationsClassification": 71.91, - "TweetSentimentExtractionClassification": 61.72 + "Model": "bert-base-25lang-cased", + "AmazonReviewsClassification (fr)": 29.39, + "MTOPDomainClassification (fr)": 63.63, + "MTOPIntentClassification (fr)": 37.86, + "MasakhaNEWSClassification (fra)": 63.91, + "MassiveIntentClassification (fr)": 37.3, + "MassiveScenarioClassification (fr)": 44.47 } ] }, "Clustering": { "v_measure": [ { - "Model": "text-embedding-3-small", - "ArxivClusteringP2P": 46.57, - "ArxivClusteringS2S": 39.35, - "BiorxivClusteringP2P": 37.77, - "BiorxivClusteringS2S": 34.68, - "MedrxivClusteringP2P": 32.77, - "MedrxivClusteringS2S": 31.85, - "RedditClustering": 64.09, - "RedditClusteringP2P": 65.12, - "StackExchangeClustering": 72.05, - "StackExchangeClusteringP2P": 34.04, - "TwentyNewsgroupsClustering": 54.81 + "Model": "bert-base-25lang-cased", + "AlloProfClusteringP2P": 53.49, + "AlloProfClusteringS2S": 43.1, + "HALClusteringS2S": 19.78, + "MLSUMClusteringP2P": 40.73, + "MLSUMClusteringS2S": 31.94, + "MasakhaNEWSClusteringP2P (fra)": 24.23, + "MasakhaNEWSClusteringS2S (fra)": 24.46 } ] }, "PairClassification": { "ap": [ { - "Model": "text-embedding-3-small", - "OpusparcusPC (fr)": 94.45, - "SprintDuplicateQuestions": 94.58, - "TwitterSemEval2015": 73.33, - "TwitterURLCorpus": 87.21 + "Model": "bert-base-25lang-cased", + "OpusparcusPC (fr)": 86.79, + "PawsXPairClassification (fr)": 53.39 } ] }, "Reranking": { "map": [ { - "Model": "text-embedding-3-small", - "AskUbuntuDupQuestions": 62.18, - "MindSmallReranking": 29.93, - "SciDocsRR": 83.25, - "StackOverflowDupQuestions": 51.53 + "Model": "bert-base-25lang-cased", + "AlloprofReranking": 36.25, + "SyntecReranking": 53.25 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "text-embedding-3-small", - "ARCChallenge": 14.63, - "AlphaNLI": 30.61, - "ArguAna": 55.49, - "CQADupstackRetrieval": 42.58, - "ClimateFEVER": 26.86, - "DBPedia": 39.97, - "FEVER": 79.42, - "FiQA2018": 44.91, - "HellaSwag": 30.94, - "HotpotQA": 63.63, - "MSMARCO": 37.02, - "NFCorpus": 38.33, - "NQ": 52.86, - "PIQA": 33.69, - "Quail": 6.11, - "QuoraRetrieval": 88.83, - "RARbCode": 72.03, - "RARbMath": 71.07, - "SCIDOCS": 20.8, - "SIQA": 3.03, - "SciFact": 73.37, - 
"SpartQA": 6.63, - "TRECCOVID": 77.9, - "TempReasonL1": 2.35, - "TempReasonL2Fact": 25.68, - "TempReasonL2Pure": 2.76, - "TempReasonL3Fact": 22.09, - "TempReasonL3Pure": 9.79, - "Touche2020": 24.28, - "WinoGrande": 31.53 + "Model": "bert-base-25lang-cased", + "AlloprofRetrieval": 1.6, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 3.55, + "SyntecRetrieval": 18.95, + "XPQARetrieval (fr)": 18.46 } ] }, "STS": { "spearman": [ { - "Model": "text-embedding-3-small", - "BIOSSES": 88.72, - "SICK-R": 76.73, - "STS12": 73.09, - "STS13": 84.92, - "STS14": 79.81, - "STS15": 88.01, - "STS16": 84.41, - "STS17 (en-en)": 90.94, - "STS22 (en)": 64.96, - "STSBenchmark": 84.24 + "Model": "bert-base-25lang-cased", + "SICKFr": 58.76, + "STS22 (fr)": 38.77, + "STSBenchmarkMultilingualSTS (fr)": 52.25 } ] }, "Summarization": { "spearman": [ { - "Model": "text-embedding-3-small", - "SummEval": 31.12 + "Model": "bert-base-25lang-cased", + "SummEvalFr": 28.84 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "text-embedding-3-small" + "Model": "bert-base-25lang-cased" } ] } }, - "Cohere-embed-english-v3.0-instruct": { + "USER-base": { "BitextMining": { "f1": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base", + "Tatoeba (rus-Cyrl_eng-Latn)": 90.2 } ] }, "Classification": { "accuracy": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base", + "GeoreviewClassification (rus-Cyrl)": 47.23, + "HeadlineClassification (rus-Cyrl)": 74.88, + "InappropriatenessClassification (rus-Cyrl)": 61.94, + "KinopoiskClassification (rus-Cyrl)": 55.69, + "MassiveIntentClassification (rus-Cyrl)": 65.57, + "MassiveScenarioClassification (rus-Cyrl)": 68.33, + "RuReviewsClassification (rus-Cyrl)": 66.44, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.55, + "RuSciBenchOECDClassification (rus-Cyrl)": 43.28 } ] }, "Clustering": { "v_measure": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base", + "GeoreviewClusteringP2P (rus-Cyrl)": 64.16, + "MLSUMClusteringP2P (rus-Cyrl)": 48.09, + "MLSUMClusteringS2S (rus-Cyrl)": 45.73, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.38, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.73 } ] }, "PairClassification": { "ap": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base", + "OpusparcusPC (rus-Cyrl)": 91.65, + "TERRa (rus-Cyrl)": 60.02 } ] }, "Reranking": { "map": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base", + "RuBQReranking (rus-Cyrl)": 64.42 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "Cohere-embed-english-v3.0-instruct", - "ARCChallenge": 10.1, - "AlphaNLI": 18.75, - "HellaSwag": 29.02, - "PIQA": 27.89, - "Quail": 7.77, - "RARbCode": 56.56, - "RARbMath": 72.05, - "SIQA": 5.03, - "SpartQA": 3.33, - "TempReasonL1": 1.43, - "TempReasonL2Fact": 40.46, - "TempReasonL2Pure": 2.39, - "TempReasonL3Fact": 33.87, - "TempReasonL3Pure": 7.52, - "WinoGrande": 65.02 + "Model": "USER-base", + "RiaNewsRetrieval (rus-Cyrl)": 77.83, + "RuBQRetrieval (rus-Cyrl)": 56.86 } ] }, "STS": { "spearman": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base", + "RUParaPhraserSTS (rus-Cyrl)": 73.56, + "RuSTSBenchmarkSTS (rus-Cyrl)": 82.26, + "STS22 (rus-Cyrl)": 63.39, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81 } ] }, "Summarization": { "spearman": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "Cohere-embed-english-v3.0-instruct" + "Model": "USER-base" } ] } }, - "voyage-2": { + 
"paraphrase-multilingual-mpnet-base-v2": { "BitextMining": { "f1": [ { - "Model": "voyage-2" + "Model": "paraphrase-multilingual-mpnet-base-v2", + "BUCC (de-en)": 98.59, + "BUCC (fr-en)": 96.89, + "BUCC (ru-en)": 96.44, + "BUCC (zh-en)": 97.56, + "BornholmBitextMining (dan-Latn)": 18.18, + "Tatoeba (afr-eng)": 72.96, + "Tatoeba (amh-eng)": 53.49, + "Tatoeba (ang-eng)": 16.72, + "Tatoeba (ara-eng)": 90.19, + "Tatoeba (arq-eng)": 19.84, + "Tatoeba (arz-eng)": 55.69, + "Tatoeba (ast-eng)": 70.08, + "Tatoeba (awa-eng)": 42.83, + "Tatoeba (aze-eng)": 76.36, + "Tatoeba (bel-eng)": 79.94, + "Tatoeba (ben-eng)": 64.9, + "Tatoeba (ber-eng)": 4.88, + "Tatoeba (bos-eng)": 94.02, + "Tatoeba (bre-eng)": 6.42, + "Tatoeba (bul-eng)": 93.52, + "Tatoeba (cat-eng)": 96.05, + "Tatoeba (cbk-eng)": 58.68, + "Tatoeba (ceb-eng)": 7.39, + "Tatoeba (ces-eng)": 95.73, + "Tatoeba (cha-eng)": 12.59, + "Tatoeba (cmn-eng)": 95.83, + "Tatoeba (cor-eng)": 3.53, + "Tatoeba (csb-eng)": 23.73, + "Tatoeba (cym-eng)": 22.31, + "Tatoeba (dan-eng)": 96.17, + "Tatoeba (deu-eng)": 97.73, + "Tatoeba (dsb-eng)": 36.85, + "Tatoeba (dtp-eng)": 5.03, + "Tatoeba (ell-eng)": 94.93, + "Tatoeba (epo-eng)": 55.12, + "Tatoeba (est-eng)": 98.4, + "Tatoeba (eus-eng)": 31.33, + "Tatoeba (fao-eng)": 38.24, + "Tatoeba (fin-eng)": 95.92, + "Tatoeba (fra-eng)": 93.12, + "Tatoeba (fry-eng)": 43.54, + "Tatoeba (gla-eng)": 4.72, + "Tatoeba (gle-eng)": 16.85, + "Tatoeba (glg-eng)": 95.32, + "Tatoeba (gsw-eng)": 25.12, + "Tatoeba (heb-eng)": 88.26, + "Tatoeba (hin-eng)": 97.75, + "Tatoeba (hrv-eng)": 97.0, + "Tatoeba (hsb-eng)": 44.32, + "Tatoeba (hun-eng)": 94.18, + "Tatoeba (hye-eng)": 94.38, + "Tatoeba (ido-eng)": 43.91, + "Tatoeba (ile-eng)": 60.36, + "Tatoeba (ina-eng)": 84.32, + "Tatoeba (ind-eng)": 93.5, + "Tatoeba (isl-eng)": 59.25, + "Tatoeba (ita-eng)": 93.76, + "Tatoeba (jav-eng)": 23.39, + "Tatoeba (jpn-eng)": 92.51, + "Tatoeba (kab-eng)": 1.41, + "Tatoeba (kat-eng)": 95.46, + "Tatoeba (kaz-eng)": 61.49, + "Tatoeba (khm-eng)": 58.8, + "Tatoeba (kor-eng)": 93.07, + "Tatoeba (kur-eng)": 61.44, + "Tatoeba (kzj-eng)": 5.88, + "Tatoeba (lat-eng)": 24.25, + "Tatoeba (lfn-eng)": 49.56, + "Tatoeba (lit-eng)": 95.37, + "Tatoeba (lvs-eng)": 97.53, + "Tatoeba (mal-eng)": 88.46, + "Tatoeba (mar-eng)": 93.83, + "Tatoeba (max-eng)": 48.77, + "Tatoeba (mhr-eng)": 7.57, + "Tatoeba (mkd-eng)": 93.02, + "Tatoeba (mon-eng)": 96.14, + "Tatoeba (nds-eng)": 38.88, + "Tatoeba (nld-eng)": 95.5, + "Tatoeba (nno-eng)": 81.41, + "Tatoeba (nob-eng)": 98.53, + "Tatoeba (nov-eng)": 50.23, + "Tatoeba (oci-eng)": 43.49, + "Tatoeba (orv-eng)": 23.77, + "Tatoeba (pam-eng)": 5.39, + "Tatoeba (pes-eng)": 93.47, + "Tatoeba (pms-eng)": 34.19, + "Tatoeba (pol-eng)": 96.95, + "Tatoeba (por-eng)": 93.02, + "Tatoeba (ron-eng)": 96.43, + "Tatoeba (rus-eng)": 92.92, + "Tatoeba (slk-eng)": 96.62, + "Tatoeba (slv-eng)": 97.08, + "Tatoeba (spa-eng)": 97.0, + "Tatoeba (sqi-eng)": 98.57, + "Tatoeba (srp-eng)": 94.12, + "Tatoeba (swe-eng)": 95.45, + "Tatoeba (swg-eng)": 22.8, + "Tatoeba (swh-eng)": 16.02, + "Tatoeba (tam-eng)": 73.6, + "Tatoeba (tat-eng)": 10.89, + "Tatoeba (tel-eng)": 79.73, + "Tatoeba (tgl-eng)": 17.67, + "Tatoeba (tha-eng)": 95.99, + "Tatoeba (tuk-eng)": 14.91, + "Tatoeba (tur-eng)": 96.17, + "Tatoeba (tzl-eng)": 34.21, + "Tatoeba (uig-eng)": 48.35, + "Tatoeba (ukr-eng)": 92.67, + "Tatoeba (urd-eng)": 95.12, + "Tatoeba (uzb-eng)": 23.19, + "Tatoeba (vie-eng)": 97.23, + "Tatoeba (war-eng)": 7.42, + "Tatoeba (wuu-eng)": 78.25, + "Tatoeba (xho-eng)": 6.53, + "Tatoeba 
(yid-eng)": 30.73, + "Tatoeba (yue-eng)": 77.58, + "Tatoeba (zsm-eng)": 95.8, + "Tatoeba (gsw-Latn_eng-Latn)": 25.12, + "Tatoeba (spa-Latn_eng-Latn)": 97.0, + "Tatoeba (lat-Latn_eng-Latn)": 24.25, + "Tatoeba (hun-Latn_eng-Latn)": 94.18, + "Tatoeba (eus-Latn_eng-Latn)": 31.33, + "Tatoeba (heb-Hebr_eng-Latn)": 88.26, + "Tatoeba (ang-Latn_eng-Latn)": 16.72, + "Tatoeba (swe-Latn_eng-Latn)": 95.45, + "Tatoeba (slk-Latn_eng-Latn)": 96.62, + "Tatoeba (ell-Grek_eng-Latn)": 94.93, + "Tatoeba (nld-Latn_eng-Latn)": 95.5, + "Tatoeba (cym-Latn_eng-Latn)": 22.31, + "Tatoeba (sqi-Latn_eng-Latn)": 98.57, + "Tatoeba (csb-Latn_eng-Latn)": 23.73, + "Tatoeba (ben-Beng_eng-Latn)": 64.9, + "Tatoeba (bre-Latn_eng-Latn)": 6.42, + "Tatoeba (mkd-Cyrl_eng-Latn)": 93.02, + "Tatoeba (cmn-Hans_eng-Latn)": 95.83, + "Tatoeba (deu-Latn_eng-Latn)": 97.73, + "Tatoeba (fao-Latn_eng-Latn)": 38.24, + "Tatoeba (afr-Latn_eng-Latn)": 72.96, + "Tatoeba (nno-Latn_eng-Latn)": 81.41, + "Tatoeba (jpn-Jpan_eng-Latn)": 92.51, + "Tatoeba (tzl-Latn_eng-Latn)": 34.21, + "Tatoeba (arz-Arab_eng-Latn)": 55.69, + "Tatoeba (ita-Latn_eng-Latn)": 93.76, + "Tatoeba (arq-Arab_eng-Latn)": 19.84, + "Tatoeba (uzb-Latn_eng-Latn)": 23.19, + "Tatoeba (rus-Cyrl_eng-Latn)": 92.92, + "Tatoeba (tat-Cyrl_eng-Latn)": 10.89, + "Tatoeba (fin-Latn_eng-Latn)": 95.92, + "Tatoeba (nob-Latn_eng-Latn)": 98.53, + "Tatoeba (tam-Taml_eng-Latn)": 73.6, + "Tatoeba (kur-Latn_eng-Latn)": 61.44, + "Tatoeba (wuu-Hans_eng-Latn)": 78.25, + "Tatoeba (cor-Latn_eng-Latn)": 3.53, + "Tatoeba (cha-Latn_eng-Latn)": 12.59, + "Tatoeba (hsb-Latn_eng-Latn)": 44.32, + "Tatoeba (max-Deva_eng-Latn)": 48.77, + "Tatoeba (kat-Geor_eng-Latn)": 95.46, + "Tatoeba (mal-Mlym_eng-Latn)": 88.46, + "Tatoeba (ina-Latn_eng-Latn)": 84.32, + "Tatoeba (cbk-Latn_eng-Latn)": 58.68, + "Tatoeba (yid-Hebr_eng-Latn)": 30.73, + "Tatoeba (swg-Latn_eng-Latn)": 22.8, + "Tatoeba (dtp-Latn_eng-Latn)": 5.03, + "Tatoeba (ber-Tfng_eng-Latn)": 4.88, + "Tatoeba (epo-Latn_eng-Latn)": 55.12, + "Tatoeba (mar-Deva_eng-Latn)": 93.83, + "Tatoeba (kaz-Cyrl_eng-Latn)": 61.49, + "Tatoeba (tgl-Latn_eng-Latn)": 17.67, + "Tatoeba (hrv-Latn_eng-Latn)": 97.0, + "Tatoeba (bel-Cyrl_eng-Latn)": 79.94, + "Tatoeba (pam-Latn_eng-Latn)": 5.39, + "Tatoeba (zsm-Latn_eng-Latn)": 95.8, + "Tatoeba (ces-Latn_eng-Latn)": 95.73, + "Tatoeba (gla-Latn_eng-Latn)": 4.72, + "Tatoeba (hin-Deva_eng-Latn)": 97.75, + "Tatoeba (slv-Latn_eng-Latn)": 97.08, + "Tatoeba (cat-Latn_eng-Latn)": 96.05, + "Tatoeba (war-Latn_eng-Latn)": 7.42, + "Tatoeba (hye-Armn_eng-Latn)": 94.38, + "Tatoeba (ind-Latn_eng-Latn)": 93.5, + "Tatoeba (kor-Hang_eng-Latn)": 93.07, + "Tatoeba (por-Latn_eng-Latn)": 93.02, + "Tatoeba (fry-Latn_eng-Latn)": 43.54, + "Tatoeba (dan-Latn_eng-Latn)": 96.17, + "Tatoeba (nov-Latn_eng-Latn)": 50.23, + "Tatoeba (vie-Latn_eng-Latn)": 97.23, + "Tatoeba (kzj-Latn_eng-Latn)": 5.88, + "Tatoeba (ido-Latn_eng-Latn)": 43.91, + "Tatoeba (tuk-Latn_eng-Latn)": 14.91, + "Tatoeba (glg-Latn_eng-Latn)": 95.32, + "Tatoeba (bos-Latn_eng-Latn)": 94.02, + "Tatoeba (gle-Latn_eng-Latn)": 16.85, + "Tatoeba (fra-Latn_eng-Latn)": 93.12, + "Tatoeba (lvs-Latn_eng-Latn)": 97.53, + "Tatoeba (mon-Cyrl_eng-Latn)": 96.14, + "Tatoeba (lit-Latn_eng-Latn)": 95.37, + "Tatoeba (ron-Latn_eng-Latn)": 96.43, + "Tatoeba (pms-Latn_eng-Latn)": 34.19, + "Tatoeba (lfn-Latn_eng-Latn)": 49.56, + "Tatoeba (isl-Latn_eng-Latn)": 59.25, + "Tatoeba (xho-Latn_eng-Latn)": 6.53, + "Tatoeba (orv-Cyrl_eng-Latn)": 23.77, + "Tatoeba (ukr-Cyrl_eng-Latn)": 92.67, + "Tatoeba (dsb-Latn_eng-Latn)": 36.85, + "Tatoeba 
(nds-Latn_eng-Latn)": 38.88, + "Tatoeba (amh-Ethi_eng-Latn)": 53.49, + "Tatoeba (yue-Hant_eng-Latn)": 77.58, + "Tatoeba (urd-Arab_eng-Latn)": 95.12, + "Tatoeba (tel-Telu_eng-Latn)": 79.73, + "Tatoeba (ile-Latn_eng-Latn)": 60.36, + "Tatoeba (jav-Latn_eng-Latn)": 23.39, + "Tatoeba (ast-Latn_eng-Latn)": 70.08, + "Tatoeba (tha-Thai_eng-Latn)": 95.99, + "Tatoeba (ara-Arab_eng-Latn)": 90.19, + "Tatoeba (pes-Arab_eng-Latn)": 93.47, + "Tatoeba (awa-Deva_eng-Latn)": 42.83, + "Tatoeba (tur-Latn_eng-Latn)": 96.17, + "Tatoeba (ceb-Latn_eng-Latn)": 7.39, + "Tatoeba (swh-Latn_eng-Latn)": 16.02, + "Tatoeba (srp-Cyrl_eng-Latn)": 94.12, + "Tatoeba (est-Latn_eng-Latn)": 98.4, + "Tatoeba (aze-Latn_eng-Latn)": 76.36, + "Tatoeba (bul-Cyrl_eng-Latn)": 93.52, + "Tatoeba (oci-Latn_eng-Latn)": 43.49, + "Tatoeba (pol-Latn_eng-Latn)": 96.95, + "Tatoeba (kab-Latn_eng-Latn)": 1.41, + "Tatoeba (khm-Khmr_eng-Latn)": 58.8, + "Tatoeba (uig-Arab_eng-Latn)": 48.35, + "Tatoeba (mhr-Cyrl_eng-Latn)": 7.57 } ] }, "Classification": { "accuracy": [ { - "Model": "voyage-2", - "AmazonReviewsClassification (fr)": 37.26, - "MTOPDomainClassification (fr)": 79.79, - "MTOPIntentClassification (fr)": 45.62, - "MasakhaNEWSClassification (fra)": 80.19, - "MassiveIntentClassification (fr)": 53.7, - "MassiveScenarioClassification (fr)": 62.46 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "voyage-2", - "AlloProfClusteringP2P": 57.96, - "AlloProfClusteringS2S": 41.65, - "HALClusteringS2S": 24.84, - "MLSUMClusteringP2P": 45.08, - "MLSUMClusteringS2S": 38.77, - "MasakhaNEWSClusteringP2P (fra)": 48.54, - "MasakhaNEWSClusteringS2S (fra)": 36.33 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "voyage-2", - "OpusparcusPC (fr)": 89.76, - "PawsXPairClassification (fr)": 58.96 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "voyage-2", - "AlloprofReranking": 63.54, - "SyntecReranking": 82.65 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "voyage-2", - "AlloprofRetrieval": 45.5, - "BSARDRetrieval": 0.15, - "MintakaRetrieval (fr)": 15.51, - "SyntecRetrieval": 75.83, - "XPQARetrieval (fr)": 67.07 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "voyage-2", - "SICKFr": 68.51, - "STS22 (fr)": 70.51, - "STSBenchmarkMultilingualSTS (fr)": 76.43 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "voyage-2", - "SummEvalFr": 30.88 + "Model": "paraphrase-multilingual-mpnet-base-v2", + "AllegroReviews": 33.86, + "AllegroReviews (pol-Latn)": 33.89, + "AmazonCounterfactualClassification (de)": 69.95, + "AmazonCounterfactualClassification (en)": 75.81, + "AmazonCounterfactualClassification (en-ext)": 76.25, + "AmazonCounterfactualClassification (ja)": 69.79, + "AmazonCounterfactualClassification (deu-Latn)": 69.96, + "AmazonCounterfactualClassification (jpn-Jpan)": 69.78, + "AmazonPolarityClassification": 76.41, + "AmazonReviewsClassification (de)": 39.52, + "AmazonReviewsClassification (en)": 38.52, + "AmazonReviewsClassification (es)": 39.99, + "AmazonReviewsClassification (fr)": 39.0, + "AmazonReviewsClassification (ja)": 36.64, + "AmazonReviewsClassification (zh)": 37.74, + "AmazonReviewsClassification (deu-Latn)": 39.53, + "AmazonReviewsClassification (spa-Latn)": 39.97, + "AmazonReviewsClassification (fra-Latn)": 38.98, + "AmazonReviewsClassification (jpn-Jpan)": 36.65, + "AmazonReviewsClassification (cmn-Hans)": 37.74, + "AngryTweetsClassification (dan-Latn)": 54.84, + "Banking77Classification": 81.1, + "CBD": 65.0, + "CBD (pol-Latn)": 64.97, + "DanishPoliticalCommentsClassification 
(dan-Latn)": 40.96, + "EmotionClassification": 45.85, + "GeoreviewClassification (rus-Cyrl)": 42.33, + "HeadlineClassification (rus-Cyrl)": 70.35, + "IFlyTek (cmn-Hans)": 43.98, + "ImdbClassification": 64.58, + "InappropriatenessClassification (rus-Cyrl)": 59.32, + "JDReview (cmn-Hans)": 70.34, + "KinopoiskClassification (rus-Cyrl)": 44.31, + "LccSentimentClassification (dan-Latn)": 58.4, + "MTOPDomainClassification (de)": 85.73, + "MTOPDomainClassification (en)": 89.24, + "MTOPDomainClassification (es)": 86.96, + "MTOPDomainClassification (fr)": 81.21, + "MTOPDomainClassification (hi)": 84.76, + "MTOPDomainClassification (th)": 82.51, + "MTOPDomainClassification (deu-Latn)": 85.73, + "MTOPDomainClassification (spa-Latn)": 86.98, + "MTOPDomainClassification (fra-Latn)": 81.21, + "MTOPDomainClassification (hin-Deva)": 84.76, + "MTOPDomainClassification (tha-Thai)": 82.51, + "MTOPIntentClassification (de)": 61.27, + "MTOPIntentClassification (en)": 68.69, + "MTOPIntentClassification (es)": 66.59, + "MTOPIntentClassification (fr)": 59.76, + "MTOPIntentClassification (hi)": 62.37, + "MTOPIntentClassification (th)": 64.8, + "MTOPIntentClassification (deu-Latn)": 61.26, + "MTOPIntentClassification (spa-Latn)": 66.6, + "MTOPIntentClassification (fra-Latn)": 59.75, + "MTOPIntentClassification (hin-Deva)": 62.38, + "MTOPIntentClassification (tha-Thai)": 64.77, + "MasakhaNEWSClassification (fra)": 78.1, + "MasakhaNEWSClassification (amh-Ethi)": 78.83, + "MasakhaNEWSClassification (eng)": 75.39, + "MasakhaNEWSClassification (fra-Latn)": 72.94, + "MasakhaNEWSClassification (hau-Latn)": 54.49, + "MasakhaNEWSClassification (ibo-Latn)": 46.79, + "MasakhaNEWSClassification (lin-Latn)": 69.77, + "MasakhaNEWSClassification (lug-Latn)": 43.05, + "MasakhaNEWSClassification (orm-Ethi)": 41.97, + "MasakhaNEWSClassification (pcm-Latn)": 90.2, + "MasakhaNEWSClassification (run-Latn)": 49.97, + "MasakhaNEWSClassification (sna-Latn)": 59.78, + "MasakhaNEWSClassification (som-Latn)": 47.65, + "MasakhaNEWSClassification (swa-Latn)": 60.42, + "MasakhaNEWSClassification (tir-Ethi)": 45.04, + "MasakhaNEWSClassification (xho-Latn)": 48.82, + "MasakhaNEWSClassification (yor-Latn)": 58.3, + "MassiveIntentClassification (pl)": 64.29, + "MassiveIntentClassification (fr)": 61.88, + "MassiveIntentClassification (mal-Mlym)": 54.34, + "MassiveIntentClassification (tel-Telu)": 52.85, + "MassiveIntentClassification (jpn-Jpan)": 63.76, + "MassiveIntentClassification (nld-Latn)": 63.57, + "MassiveIntentClassification (jav-Latn)": 36.49, + "MassiveIntentClassification (heb-Hebr)": 58.25, + "MassiveIntentClassification (tam-Taml)": 50.18, + "MassiveIntentClassification (slv-Latn)": 63.5, + "MassiveIntentClassification (tha-Thai)": 61.12, + "MassiveIntentClassification (fra-Latn)": 64.8, + "MassiveIntentClassification (ind-Latn)": 65.43, + "MassiveIntentClassification (amh-Ethi)": 41.56, + "MassiveIntentClassification (en)": 69.32, + "MassiveIntentClassification (nob-Latn)": 62.62, + "MassiveIntentClassification (kan-Knda)": 50.62, + "MassiveIntentClassification (dan-Latn)": 62.8, + "MassiveIntentClassification (ell-Grek)": 62.63, + "MassiveIntentClassification (msa-Latn)": 60.72, + "MassiveIntentClassification (ita-Latn)": 64.69, + "MassiveIntentClassification (tur-Latn)": 64.58, + "MassiveIntentClassification (ben-Beng)": 48.79, + "MassiveIntentClassification (aze-Latn)": 56.98, + "MassiveIntentClassification (tgl-Latn)": 38.83, + "MassiveIntentClassification (mon-Cyrl)": 56.61, + "MassiveIntentClassification (urd-Arab)": 56.36, + 
"MassiveIntentClassification (vie-Latn)": 59.71, + "MassiveIntentClassification (cmo-Hans)": 65.32, + "MassiveIntentClassification (cym-Latn)": 27.89, + "MassiveIntentClassification (rus-Cyrl)": 63.23, + "MassiveIntentClassification (mya-Mymr)": 57.08, + "MassiveIntentClassification (hun-Latn)": 63.85, + "MassiveIntentClassification (hin-Deva)": 62.79, + "MassiveIntentClassification (hye-Armn)": 57.76, + "MassiveIntentClassification (kat-Geor)": 49.88, + "MassiveIntentClassification (fin-Latn)": 62.26, + "MassiveIntentClassification (ara-Arab)": 51.43, + "MassiveIntentClassification (por-Latn)": 64.88, + "MassiveIntentClassification (pol-Latn)": 64.32, + "MassiveIntentClassification (isl-Latn)": 37.09, + "MassiveIntentClassification (afr-Latn)": 52.35, + "MassiveIntentClassification (fas-Arab)": 65.33, + "MassiveIntentClassification (khm-Khmr)": 45.48, + "MassiveIntentClassification (kor-Kore)": 61.84, + "MassiveIntentClassification (spa-Latn)": 64.45, + "MassiveIntentClassification (cmo-Hant)": 62.33, + "MassiveIntentClassification (ron-Latn)": 62.83, + "MassiveIntentClassification (sqi-Latn)": 62.48, + "MassiveIntentClassification (swa-Latn)": 31.93, + "MassiveIntentClassification (swe-Latn)": 64.71, + "MassiveIntentClassification (deu-Latn)": 59.56, + "MassiveIntentClassification (lav-Latn)": 61.29, + "MassiveScenarioClassification (pl)": 68.98, + "MassiveScenarioClassification (fr)": 67.9, + "MassiveScenarioClassification (tam-Taml)": 55.97, + "MassiveScenarioClassification (heb-Hebr)": 65.16, + "MassiveScenarioClassification (ind-Latn)": 70.73, + "MassiveScenarioClassification (afr-Latn)": 59.68, + "MassiveScenarioClassification (fin-Latn)": 67.58, + "MassiveScenarioClassification (vie-Latn)": 65.7, + "MassiveScenarioClassification (mon-Cyrl)": 60.84, + "MassiveScenarioClassification (sqi-Latn)": 69.62, + "MassiveScenarioClassification (nob-Latn)": 70.23, + "MassiveScenarioClassification (por-Latn)": 70.08, + "MassiveScenarioClassification (aze-Latn)": 61.52, + "MassiveScenarioClassification (nld-Latn)": 70.37, + "MassiveScenarioClassification (spa-Latn)": 70.4, + "MassiveScenarioClassification (mal-Mlym)": 60.14, + "MassiveScenarioClassification (cmo-Hant)": 68.71, + "MassiveScenarioClassification (fra-Latn)": 70.71, + "MassiveScenarioClassification (ita-Latn)": 69.74, + "MassiveScenarioClassification (hun-Latn)": 70.31, + "MassiveScenarioClassification (urd-Arab)": 62.92, + "MassiveScenarioClassification (cym-Latn)": 35.27, + "MassiveScenarioClassification (khm-Khmr)": 53.13, + "MassiveScenarioClassification (swa-Latn)": 37.26, + "MassiveScenarioClassification (mya-Mymr)": 63.03, + "MassiveScenarioClassification (isl-Latn)": 44.16, + "MassiveScenarioClassification (tha-Thai)": 69.44, + "MassiveScenarioClassification (kat-Geor)": 57.3, + "MassiveScenarioClassification (pol-Latn)": 68.99, + "MassiveScenarioClassification (ell-Grek)": 68.81, + "MassiveScenarioClassification (cmo-Hans)": 71.25, + "MassiveScenarioClassification (tgl-Latn)": 43.98, + "MassiveScenarioClassification (lav-Latn)": 66.28, + "MassiveScenarioClassification (jpn-Jpan)": 69.68, + "MassiveScenarioClassification (deu-Latn)": 67.35, + "MassiveScenarioClassification (ara-Arab)": 57.79, + "MassiveScenarioClassification (en)": 75.35, + "MassiveScenarioClassification (msa-Latn)": 65.85, + "MassiveScenarioClassification (tel-Telu)": 58.79, + "MassiveScenarioClassification (ben-Beng)": 54.52, + "MassiveScenarioClassification (kan-Knda)": 56.08, + "MassiveScenarioClassification (tur-Latn)": 70.41, + 
"MassiveScenarioClassification (kor-Kore)": 68.51, + "MassiveScenarioClassification (hye-Armn)": 63.03, + "MassiveScenarioClassification (jav-Latn)": 44.22, + "MassiveScenarioClassification (rus-Cyrl)": 69.92, + "MassiveScenarioClassification (hin-Deva)": 67.94, + "MassiveScenarioClassification (amh-Ethi)": 48.96, + "MassiveScenarioClassification (dan-Latn)": 71.04, + "MassiveScenarioClassification (fas-Arab)": 69.88, + "MassiveScenarioClassification (slv-Latn)": 70.81, + "MassiveScenarioClassification (swe-Latn)": 71.6, + "MassiveScenarioClassification (ron-Latn)": 67.94, + "MultilingualSentiment (cmn-Hans)": 66.49, + "NoRecClassification (nob-Latn)": 50.32, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 41.57, + "OnlineShopping (cmn-Hans)": 87.75, + "PAC": 63.76, + "PAC (pol-Latn)": 63.76, + "PolEmo2.0-IN": 62.78, + "PolEmo2.0-IN (pol-Latn)": 62.74, + "PolEmo2.0-OUT": 19.98, + "PolEmo2.0-OUT (pol-Latn)": 19.92, + "RuReviewsClassification (rus-Cyrl)": 62.33, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.01, + "RuSciBenchOECDClassification (rus-Cyrl)": 44.14, + "TNews (cmn-Hans)": 43.73, + "ToxicConversationsClassification": 65.56, + "TweetSentimentExtractionClassification": 59.04, + "Waimai (cmn-Hans)": 83.97 } ] }, - "InstructionRetrieval": { - "p-MRR": [ + "Clustering": { + "v_measure": [ { - "Model": "voyage-2" - } - ] - } - }, - "e5-base": { - "BitextMining": { - "f1": [ - { - "Model": "e5-base", - "BornholmBitextMining": 40.09 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "e5-base", - "AngryTweetsClassification": 45.06, - "DKHateClassification": 58.51, - "DanishPoliticalCommentsClassification": 28.43, - "LccSentimentClassification": 37.47, - "MassiveIntentClassification (da)": 44.25, - "MassiveIntentClassification (nb)": 41.57, - "MassiveIntentClassification (sv)": 41.34, - "MassiveScenarioClassification (da)": 52.99, - "MassiveScenarioClassification (nb)": 50.33, - "MassiveScenarioClassification (sv)": 50.0, - "NoRecClassification": 42.0, - "NordicLangClassification": 59.34, - "NorwegianParliament": 57.42, - "ScalaDaClassification": 50.08, - "ScalaNbClassification": 50.18 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "e5-base" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "e5-base" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "e5-base" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "e5-base", - "LEMBNarrativeQARetrieval": 25.31, - "LEMBNeedleRetrieval": 28.5, - "LEMBPasskeyRetrieval": 33.25, - "LEMBQMSumRetrieval": 23.83, - "LEMBSummScreenFDRetrieval": 74.67, - "LEMBWikimQARetrieval": 55.85 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "e5-base" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "e5-base" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "e5-base" - } - ] - } - }, - "elser-v2": { - "BitextMining": { - "f1": [ - { - "Model": "elser-v2" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "elser-v2", - "AmazonCounterfactualClassification (en)": 74.16, - "AmazonPolarityClassification": 61.91, - "AmazonReviewsClassification (en)": 32.06, - "Banking77Classification": 82.05, - "EmotionClassification": 46.65, - "ImdbClassification": 65.02, - "MTOPDomainClassification (en)": 93.17, - "MTOPIntentClassification (en)": 71.1, - "MassiveIntentClassification (en)": 68.48, - "MassiveScenarioClassification (en)": 74.98, - "ToxicConversationsClassification": 68.15, - 
"TweetSentimentExtractionClassification": 53.57 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "elser-v2", - "ArxivClusteringP2P": 35.27, - "ArxivClusteringS2S": 23.18, - "BiorxivClusteringP2P": 31.13, - "BiorxivClusteringS2S": 26.78, - "MedrxivClusteringP2P": 24.65, - "MedrxivClusteringS2S": 24.21, - "RedditClustering": 38.74, - "RedditClusteringP2P": 51.92, - "StackExchangeClustering": 42.7, - "StackExchangeClusteringP2P": 28.7, - "TwentyNewsgroupsClustering": 27.82 + "Model": "paraphrase-multilingual-mpnet-base-v2", + "8TagsClustering": 25.62, + "AlloProfClusteringP2P": 54.49, + "AlloProfClusteringS2S": 44.79, + "ArxivClusteringP2P": 37.78, + "ArxivClusteringS2S": 31.68, + "BiorxivClusteringP2P": 33.02, + "BiorxivClusteringS2S": 29.45, + "BlurbsClusteringP2P": 34.38, + "BlurbsClusteringS2S": 15.81, + "GeoreviewClusteringP2P (rus-Cyrl)": 56.18, + "HALClusteringS2S": 23.97, + "MLSUMClusteringP2P": 40.55, + "MLSUMClusteringP2P (rus-Cyrl)": 35.95, + "MLSUMClusteringS2S": 37.53, + "MLSUMClusteringS2S (rus-Cyrl)": 38.88, + "MasakhaNEWSClusteringP2P (fra)": 41.57, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 46.85, + "MasakhaNEWSClusteringP2P (eng)": 47.3, + "MasakhaNEWSClusteringP2P (fra-Latn)": 53.3, + "MasakhaNEWSClusteringP2P (hau-Latn)": 27.61, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 41.32, + "MasakhaNEWSClusteringP2P (lin-Latn)": 58.37, + "MasakhaNEWSClusteringP2P (lug-Latn)": 47.56, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.53, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 66.55, + "MasakhaNEWSClusteringP2P (run-Latn)": 51.97, + "MasakhaNEWSClusteringP2P (sna-Latn)": 45.55, + "MasakhaNEWSClusteringP2P (som-Latn)": 33.98, + "MasakhaNEWSClusteringP2P (swa-Latn)": 25.03, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 48.33, + "MasakhaNEWSClusteringP2P (xho-Latn)": 29.47, + "MasakhaNEWSClusteringP2P (yor-Latn)": 28.25, + "MasakhaNEWSClusteringS2S (fra)": 30.88, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 51.54, + "MasakhaNEWSClusteringS2S (eng)": 43.28, + "MasakhaNEWSClusteringS2S (fra-Latn)": 37.92, + "MasakhaNEWSClusteringS2S (hau-Latn)": 17.97, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 34.56, + "MasakhaNEWSClusteringS2S (lin-Latn)": 57.43, + "MasakhaNEWSClusteringS2S (lug-Latn)": 45.22, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 21.9, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 62.1, + "MasakhaNEWSClusteringS2S (run-Latn)": 46.81, + "MasakhaNEWSClusteringS2S (sna-Latn)": 43.15, + "MasakhaNEWSClusteringS2S (som-Latn)": 29.44, + "MasakhaNEWSClusteringS2S (swa-Latn)": 10.31, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.95, + "MasakhaNEWSClusteringS2S (xho-Latn)": 21.26, + "MasakhaNEWSClusteringS2S (yor-Latn)": 28.88, + "MedrxivClusteringP2P": 31.93, + "MedrxivClusteringS2S": 31.53, + "RedditClustering": 45.65, + "RedditClusteringP2P": 52.05, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.47, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 42.9, + "StackExchangeClustering": 52.99, + "StackExchangeClusteringP2P": 33.06, + "TenKGnadClusteringP2P": 35.96, + "TenKGnadClusteringS2S": 22.0, + "TwentyNewsgroupsClustering": 44.36 } ] }, "PairClassification": { "ap": [ { - "Model": "elser-v2", - "SprintDuplicateQuestions": 94.53, - "TwitterSemEval2015": 64.41, - "TwitterURLCorpus": 85.01 + "Model": "paraphrase-multilingual-mpnet-base-v2", + "CDSC-E": 75.76, + "CDSC-E (pol-Latn)": 75.77, + "OpusparcusPC (fr)": 93.45, + "OpusparcusPC (deu-Latn)": 97.34, + "OpusparcusPC (en)": 98.59, + "OpusparcusPC (fin-Latn)": 95.33, + "OpusparcusPC (fra-Latn)": 93.45, + "OpusparcusPC (rus-Cyrl)": 90.47, + "OpusparcusPC 
(swe-Latn)": 95.16, + "PPC": 93.67, + "PSC": 98.26, + "PSC (pol-Latn)": 98.26, + "PawsXPairClassification (fr)": 58.14, + "PawsXPairClassification (deu-Latn)": 55.69, + "PawsXPairClassification (en)": 60.12, + "PawsXPairClassification (spa-Latn)": 56.94, + "PawsXPairClassification (fra-Latn)": 58.14, + "PawsXPairClassification (jpn-Hira)": 49.37, + "PawsXPairClassification (kor-Hang)": 50.66, + "PawsXPairClassification (cmn-Hans)": 55.47, + "SICK-E-PL": 77.22, + "SICK-E-PL (pol-Latn)": 77.22, + "SprintDuplicateQuestions": 90.55, + "TERRa (rus-Cyrl)": 64.57, + "TwitterSemEval2015": 66.75, + "TwitterURLCorpus": 85.14 } ] }, "Reranking": { "map": [ { - "Model": "elser-v2", - "AskUbuntuDupQuestions": 58.31, - "MindSmallReranking": 30.75, - "SciDocsRR": 75.62, - "StackOverflowDupQuestions": 48.4 + "Model": "paraphrase-multilingual-mpnet-base-v2", + "AlloprofReranking": 54.34, + "AlloprofReranking (fra-Latn)": 67.2, + "AskUbuntuDupQuestions": 60.16, + "MMarcoReranking (cmn-Hans)": 14.57, + "MindSmallReranking": 30.15, + "RuBQReranking (rus-Cyrl)": 58.77, + "SciDocsRR": 78.09, + "StackOverflowDupQuestions": 46.78, + "SyntecReranking": 83.23, + "SyntecReranking (fra-Latn)": 80.97, + "T2Reranking (cmn-Hans)": 64.49 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "elser-v2", - "ArguAna": 55.98, - "CQADupstackRetrieval": 34.27, - "ClimateFEVER": 27.08, - "DBPedia": 42.7, - "FEVER": 78.55, - "FiQA2018": 41.57, - "HotpotQA": 67.01, - "MSMARCO": 38.9, - "NFCorpus": 36.66, - "NQ": 55.84, - "QuoraRetrieval": 84.69, - "SCIDOCS": 16.24, - "SciFact": 71.8, - "TRECCOVID": 72.72, - "Touche2020": 26.27 - } - ] - }, - "STS": { - "spearman": [ + "Model": "paraphrase-multilingual-mpnet-base-v2", + "AILACasedocs": 17.45, + "AILAStatutes": 22.24, + "ARCChallenge": 7.19, + "AlloprofRetrieval": 30.8, + "AlloprofRetrieval (fra-Latn)": 30.8, + "AlphaNLI": 21.87, + "ArguAna": 48.91, + "ArguAna-PL": 42.62, + "ArguAna-PL (pol-Latn)": 42.61, + "BSARDRetrieval": 0.0, + "BSARDRetrieval (fra-Latn)": 13.19, + "CQADupstackRetrieval": 31.32, + "ClimateFEVER": 15.27, + "CmedqaRetrieval (cmn-Hans)": 10.15, + "CovidRetrieval (cmn-Hans)": 28.85, + "DBPedia": 26.22, + "DBPedia-PL": 20.18, + "DuRetrieval (cmn-Hans)": 33.41, + "EcomRetrieval (cmn-Hans)": 9.69, + "FEVER": 56.76, + "FiQA-PL": 14.68, + "FiQA-PL (pol-Latn)": 14.71, + "FiQA2018": 22.96, + "GerDaLIRSmall (deu-Latn)": 3.0, + "HellaSwag": 17.53, + "HotpotQA": 37.03, + "HotpotQA-PL": 29.36, + "LEMBNarrativeQARetrieval": 16.02, + "LEMBNeedleRetrieval": 14.0, + "LEMBPasskeyRetrieval": 7.75, + "LEMBQMSumRetrieval": 12.23, + "LEMBSummScreenFDRetrieval": 41.15, + "LEMBWikimQARetrieval": 38.86, + "LeCaRDv2 (zho-Hans)": 33.91, + "LegalBenchConsumerContractsQA": 52.37, + "LegalBenchCorporateLobbying": 87.62, + "LegalQuAD (deu-Latn)": 17.8, + "LegalSummarization": 56.8, + "MMarcoRetrieval (cmn-Hans)": 44.62, + "MSMARCO": 26.6, + "MSMARCO-PL": 12.45, + "MedicalRetrieval (cmn-Hans)": 14.1, + "MintakaRetrieval (fr)": 24.45, + "MintakaRetrieval (ara-Arab)": 14.55, + "MintakaRetrieval (deu-Latn)": 25.43, + "MintakaRetrieval (spa-Latn)": 24.94, + "MintakaRetrieval (fra-Latn)": 24.45, + "MintakaRetrieval (hin-Deva)": 18.67, + "MintakaRetrieval (ita-Latn)": 25.62, + "MintakaRetrieval (jpn-Hira)": 15.46, + "MintakaRetrieval (por-Latn)": 26.15, + "NFCorpus": 25.49, + "NFCorpus-PL": 18.53, + "NFCorpus-PL (pol-Latn)": 18.54, + "NQ": 33.6, + "NQ-PL": 15.64, + "PIQA": 18.65, + "Quail": 2.98, + "Quora-PL": 79.18, + "QuoraRetrieval": 86.4, + "RARbCode": 11.02, + "RARbMath": 30.93, + "RiaNewsRetrieval 
(rus-Cyrl)": 51.75, + "RuBQRetrieval (rus-Cyrl)": 37.04, + "SCIDOCS": 13.97, + "SCIDOCS-PL": 11.18, + "SCIDOCS-PL (pol-Latn)": 11.17, + "SIQA": 1.21, + "SciFact": 50.3, + "SciFact-PL": 41.53, + "SciFact-PL (pol-Latn)": 41.55, + "SpartQA": 5.69, + "SyntecRetrieval": 76.0, + "SyntecRetrieval (fra-Latn)": 76.0, + "T2Retrieval (cmn-Hans)": 28.35, + "TRECCOVID": 37.87, + "TRECCOVID-PL": 35.38, + "TRECCOVID-PL (pol-Latn)": 35.43, + "TempReasonL1": 1.94, + "TempReasonL2Fact": 5.34, + "TempReasonL2Pure": 0.33, + "TempReasonL3Fact": 6.79, + "TempReasonL3Pure": 3.19, + "Touche2020": 17.4, + "VideoRetrieval (cmn-Hans)": 14.18, + "WinoGrande": 49.01, + "XPQARetrieval (fr)": 46.22, + "XPQARetrieval (ara-Arab_ara-Arab)": 24.86, + "XPQARetrieval (eng-Latn_ara-Arab)": 19.6, + "XPQARetrieval (ara-Arab_eng-Latn)": 28.21, + "XPQARetrieval (deu-Latn_deu-Latn)": 48.81, + "XPQARetrieval (eng-Latn_deu-Latn)": 31.93, + "XPQARetrieval (deu-Latn_eng-Latn)": 53.26, + "XPQARetrieval (spa-Latn_spa-Latn)": 41.08, + "XPQARetrieval (eng-Latn_spa-Latn)": 30.05, + "XPQARetrieval (spa-Latn_eng-Latn)": 43.4, + "XPQARetrieval (fra-Latn_fra-Latn)": 46.22, + "XPQARetrieval (eng-Latn_fra-Latn)": 29.55, + "XPQARetrieval (fra-Latn_eng-Latn)": 47.3, + "XPQARetrieval (hin-Deva_hin-Deva)": 50.74, + "XPQARetrieval (eng-Latn_hin-Deva)": 24.97, + "XPQARetrieval (hin-Deva_eng-Latn)": 49.24, + "XPQARetrieval (ita-Latn_ita-Latn)": 52.87, + "XPQARetrieval (eng-Latn_ita-Latn)": 33.44, + "XPQARetrieval (ita-Latn_eng-Latn)": 51.49, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 53.17, + "XPQARetrieval (eng-Latn_jpn-Hira)": 26.66, + "XPQARetrieval (jpn-Hira_eng-Latn)": 49.86, + "XPQARetrieval (kor-Hang_kor-Hang)": 24.9, + "XPQARetrieval (eng-Latn_kor-Hang)": 24.5, + "XPQARetrieval (kor-Hang_eng-Latn)": 24.57, + "XPQARetrieval (pol-Latn_pol-Latn)": 29.36, + "XPQARetrieval (eng-Latn_pol-Latn)": 20.48, + "XPQARetrieval (pol-Latn_eng-Latn)": 29.31, + "XPQARetrieval (por-Latn_por-Latn)": 34.26, + "XPQARetrieval (eng-Latn_por-Latn)": 21.72, + "XPQARetrieval (por-Latn_eng-Latn)": 37.62, + "XPQARetrieval (tam-Taml_tam-Taml)": 19.8, + "XPQARetrieval (eng-Latn_tam-Taml)": 13.93, + "XPQARetrieval (tam-Taml_eng-Latn)": 18.26, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 42.54, + "XPQARetrieval (eng-Latn_cmn-Hans)": 20.91, + "XPQARetrieval (cmn-Hans_eng-Latn)": 42.81 + } + ] + }, + "STS": { + "spearman": [ { - "Model": "elser-v2", - "BIOSSES": 83.79, - "SICK-R": 68.78, - "STS12": 64.81, - "STS13": 80.1, - "STS14": 74.96, - "STS15": 83.7, - "STS16": 80.55, - "STS17 (en-en)": 85.74, - "STS22 (en)": 67.5, - "STSBenchmark": 79.54 + "Model": "paraphrase-multilingual-mpnet-base-v2", + "AFQMC (cmn-Hans)": 15.69, + "ATEC (cmn-Hans)": 20.27, + "BIOSSES": 76.27, + "BQ (cmn-Hans)": 36.33, + "CDSC-R": 88.8, + "CDSC-R (pol-Latn)": 88.8, + "LCQMC (cmn-Hans)": 63.3, + "PAWSX (cmn-Hans)": 12.16, + "RUParaPhraserSTS (rus-Cyrl)": 65.74, + "RuSTSBenchmarkSTS (rus-Cyrl)": 82.46, + "SICK-R": 79.62, + "SICK-R-PL": 73.13, + "SICK-R-PL (pol-Latn)": 73.13, + "SICKFr": 75.56, + "SICKFr (fra-Latn)": 75.56, + "STS12": 77.9, + "STS13": 85.11, + "STS14": 80.81, + "STS15": 87.48, + "STS16": 83.2, + "STS17 (ar-ar)": 79.1, + "STS17 (en-ar)": 80.85, + "STS17 (en-de)": 83.28, + "STS17 (en-en)": 86.99, + "STS17 (en-tr)": 74.9, + "STS17 (es-en)": 86.11, + "STS17 (es-es)": 85.14, + "STS17 (fr-en)": 81.17, + "STS17 (it-en)": 84.24, + "STS17 (ko-ko)": 83.41, + "STS17 (nl-en)": 82.51, + "STS17 (eng-Latn_deu-Latn)": 83.28, + "STS17 (eng-Latn_tur-Latn)": 74.9, + "STS17 (eng-Latn_ara-Arab)": 80.85, + "STS17 
(ara-Arab)": 79.1, + "STS17 (nld-Latn_eng-Latn)": 82.51, + "STS17 (fra-Latn_eng-Latn)": 81.17, + "STS17 (ita-Latn_eng-Latn)": 84.24, + "STS17 (spa-Latn_eng-Latn)": 86.11, + "STS17 (spa-Latn)": 85.14, + "STS17 (kor-Hang)": 83.41, + "STS22 (pl)": 33.64, + "STS22 (fr)": 74.3, + "STS22 (spa-Latn)": 59.91, + "STS22 (en)": 63.52, + "STS22 (spa-Latn_ita-Latn)": 53.7, + "STS22 (pol-Latn)": 33.65, + "STS22 (ara-Arab)": 52.19, + "STS22 (deu-Latn)": 46.7, + "STS22 (fra-Latn)": 74.3, + "STS22 (deu-Latn_pol-Latn)": 40.53, + "STS22 (tur-Latn)": 56.3, + "STS22 (cmn-Hans_eng-Latn)": 67.96, + "STS22 (pol-Latn_eng-Latn)": 73.07, + "STS22 (rus-Cyrl)": 58.74, + "STS22 (cmn-Hans)": 61.75, + "STS22 (spa-Latn_eng-Latn)": 70.26, + "STS22 (fra-Latn_pol-Latn)": 84.52, + "STS22 (deu-Latn_eng-Latn)": 50.81, + "STS22 (deu-Latn_fra-Latn)": 62.34, + "STS22 (ita-Latn)": 60.65, + "STSB (cmn-Hans)": 80.84, + "STSBenchmark": 86.82, + "STSBenchmarkMultilingualSTS (fr)": 84.69, + "STSBenchmarkMultilingualSTS (nld-Latn)": 83.36, + "STSBenchmarkMultilingualSTS (deu-Latn)": 83.56, + "STSBenchmarkMultilingualSTS (fra-Latn)": 84.69, + "STSBenchmarkMultilingualSTS (spa-Latn)": 84.61, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.98, + "STSBenchmarkMultilingualSTS (en)": 86.82, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.45, + "STSBenchmarkMultilingualSTS (por-Latn)": 84.0, + "STSBenchmarkMultilingualSTS (ita-Latn)": 84.09, + "STSBenchmarkMultilingualSTS (pol-Latn)": 81.46 } ] }, "Summarization": { "spearman": [ { - "Model": "elser-v2", - "SummEval": 31.03 + "Model": "paraphrase-multilingual-mpnet-base-v2", + "SummEval": 31.57, + "SummEvalFr": 29.47, + "SummEvalFr (fra-Latn)": 29.47 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "elser-v2" + "Model": "paraphrase-multilingual-mpnet-base-v2" } ] } }, - "gtr-t5-xxl": { + "sentence-t5-xl": { "BitextMining": { "f1": [ { - "Model": "gtr-t5-xxl" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gtr-t5-xxl", - "AmazonCounterfactualClassification (en)": 67.3, - "AmazonPolarityClassification": 75.05, - "AmazonReviewsClassification (en)": 37.3, - "Banking77Classification": 82.32, - "EmotionClassification": 43.19, - "ImdbClassification": 70.8, - "MTOPDomainClassification (en)": 93.84, - "MTOPIntentClassification (en)": 67.71, - "MassiveIntentClassification (en)": 70.61, - "MassiveScenarioClassification (en)": 77.77, - "ToxicConversationsClassification": 68.48, - "TweetSentimentExtractionClassification": 54.54 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gtr-t5-xxl", - "ArxivClusteringP2P": 37.9, - "ArxivClusteringS2S": 32.39, - "BiorxivClusteringP2P": 30.48, - "BiorxivClusteringS2S": 27.5, - "MedrxivClusteringP2P": 29.12, - "MedrxivClusteringS2S": 27.56, - "RedditClustering": 64.13, - "RedditClusteringP2P": 62.84, - "StackExchangeClustering": 71.43, - "StackExchangeClusteringP2P": 32.85, - "TwentyNewsgroupsClustering": 50.44 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "gtr-t5-xxl", - "SprintDuplicateQuestions": 95.68, - "TwitterSemEval2015": 77.54, - "TwitterURLCorpus": 85.13 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "gtr-t5-xxl", - "AskUbuntuDupQuestions": 63.23, - "MindSmallReranking": 31.93, - "SciDocsRR": 77.96, - "StackOverflowDupQuestions": 53.5 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "gtr-t5-xxl", - "ArguAna": 53.77, - "CQADupstackRetrieval": 38.56, - "ClimateFEVER": 27.21, - "DBPedia": 41.28, - "FEVER": 74.08, - "FiQA2018": 46.78, - "HotpotQA": 59.67, - "MSMARCO": 44.05, - 
"NFCorpus": 34.18, - "NQ": 57.24, - "QuoraRetrieval": 89.09, - "SCIDOCS": 15.88, - "SciFact": 66.77, - "TRECCOVID": 51.9, - "Touche2020": 26.76 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "gtr-t5-xxl", - "BIOSSES": 81.91, - "SICK-R": 74.29, - "STS12": 70.12, - "STS13": 82.72, - "STS14": 78.24, - "STS15": 86.26, - "STS16": 81.61, - "STS17 (en-en)": 85.18, - "STS22 (en)": 65.76, - "STSBenchmark": 77.73 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "gtr-t5-xxl", - "SummEval": 30.64 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "gtr-t5-xxl" - } - ] - } - }, - "norbert3-large": { - "BitextMining": { - "f1": [ - { - "Model": "norbert3-large", - "BornholmBitextMining": 2.9 + "Model": "sentence-t5-xl", + "BUCC (de-en)": 95.04, + "BUCC (fr-en)": 94.96, + "BUCC (ru-en)": 8.33, + "BUCC (zh-en)": 1.3, + "Tatoeba (afr-eng)": 41.84, + "Tatoeba (amh-eng)": 0.03, + "Tatoeba (ang-eng)": 37.87, + "Tatoeba (ara-eng)": 0.61, + "Tatoeba (arq-eng)": 0.74, + "Tatoeba (arz-eng)": 0.42, + "Tatoeba (ast-eng)": 65.41, + "Tatoeba (awa-eng)": 1.46, + "Tatoeba (aze-eng)": 8.79, + "Tatoeba (bel-eng)": 5.76, + "Tatoeba (ben-eng)": 0.01, + "Tatoeba (ber-eng)": 5.92, + "Tatoeba (bos-eng)": 16.12, + "Tatoeba (bre-eng)": 6.12, + "Tatoeba (bul-eng)": 9.06, + "Tatoeba (cat-eng)": 57.4, + "Tatoeba (cbk-eng)": 57.68, + "Tatoeba (ceb-eng)": 12.56, + "Tatoeba (ces-eng)": 9.47, + "Tatoeba (cha-eng)": 27.13, + "Tatoeba (cmn-eng)": 1.82, + "Tatoeba (cor-eng)": 3.87, + "Tatoeba (csb-eng)": 14.41, + "Tatoeba (cym-eng)": 6.69, + "Tatoeba (dan-eng)": 54.87, + "Tatoeba (deu-eng)": 93.72, + "Tatoeba (dsb-eng)": 14.74, + "Tatoeba (dtp-eng)": 5.84, + "Tatoeba (ell-eng)": 0.6, + "Tatoeba (epo-eng)": 30.8, + "Tatoeba (est-eng)": 5.39, + "Tatoeba (eus-eng)": 11.9, + "Tatoeba (fao-eng)": 28.08, + "Tatoeba (fin-eng)": 6.81, + "Tatoeba (fra-eng)": 85.29, + "Tatoeba (fry-eng)": 38.68, + "Tatoeba (gla-eng)": 2.96, + "Tatoeba (gle-eng)": 3.74, + "Tatoeba (glg-eng)": 70.0, + "Tatoeba (gsw-eng)": 30.49, + "Tatoeba (heb-eng)": 0.87, + "Tatoeba (hin-eng)": 0.1, + "Tatoeba (hrv-eng)": 17.43, + "Tatoeba (hsb-eng)": 14.69, + "Tatoeba (hun-eng)": 7.28, + "Tatoeba (hye-eng)": 0.77, + "Tatoeba (ido-eng)": 46.65, + "Tatoeba (ile-eng)": 59.43, + "Tatoeba (ina-eng)": 82.71, + "Tatoeba (ind-eng)": 37.26, + "Tatoeba (isl-eng)": 11.21, + "Tatoeba (ita-eng)": 79.77, + "Tatoeba (jav-eng)": 7.81, + "Tatoeba (jpn-eng)": 0.91, + "Tatoeba (kab-eng)": 2.23, + "Tatoeba (kat-eng)": 1.48, + "Tatoeba (kaz-eng)": 1.77, + "Tatoeba (khm-eng)": 0.38, + "Tatoeba (kor-eng)": 1.96, + "Tatoeba (kur-eng)": 12.11, + "Tatoeba (kzj-eng)": 6.13, + "Tatoeba (lat-eng)": 27.84, + "Tatoeba (lfn-eng)": 45.89, + "Tatoeba (lit-eng)": 5.94, + "Tatoeba (lvs-eng)": 8.11, + "Tatoeba (mal-eng)": 0.59, + "Tatoeba (mar-eng)": 0.03, + "Tatoeba (max-eng)": 21.7, + "Tatoeba (mhr-eng)": 0.68, + "Tatoeba (mkd-eng)": 5.92, + "Tatoeba (mon-eng)": 2.39, + "Tatoeba (nds-eng)": 45.04, + "Tatoeba (nld-eng)": 64.75, + "Tatoeba (nno-eng)": 36.74, + "Tatoeba (nob-eng)": 54.77, + "Tatoeba (nov-eng)": 57.12, + "Tatoeba (oci-eng)": 34.39, + "Tatoeba (orv-eng)": 2.04, + "Tatoeba (pam-eng)": 8.34, + "Tatoeba (pes-eng)": 0.87, + "Tatoeba (pms-eng)": 38.06, + "Tatoeba (pol-eng)": 28.35, + "Tatoeba (por-eng)": 83.61, + "Tatoeba (ron-eng)": 65.27, + "Tatoeba (rus-eng)": 30.42, + "Tatoeba (slk-eng)": 13.19, + "Tatoeba (slv-eng)": 13.49, + "Tatoeba (spa-eng)": 89.18, + "Tatoeba (sqi-eng)": 14.66, + "Tatoeba (srp-eng)": 13.24, + "Tatoeba (swe-eng)": 60.67, + "Tatoeba 
(swg-eng)": 34.76, + "Tatoeba (swh-eng)": 8.07, + "Tatoeba (tam-eng)": 0.36, + "Tatoeba (tat-eng)": 1.46, + "Tatoeba (tel-eng)": 0.67, + "Tatoeba (tgl-eng)": 25.22, + "Tatoeba (tha-eng)": 1.58, + "Tatoeba (tuk-eng)": 4.99, + "Tatoeba (tur-eng)": 7.72, + "Tatoeba (tzl-eng)": 38.49, + "Tatoeba (uig-eng)": 0.87, + "Tatoeba (ukr-eng)": 9.12, + "Tatoeba (urd-eng)": 0.0, + "Tatoeba (uzb-eng)": 5.48, + "Tatoeba (vie-eng)": 8.45, + "Tatoeba (war-eng)": 13.75, + "Tatoeba (wuu-eng)": 1.44, + "Tatoeba (xho-eng)": 9.15, + "Tatoeba (yid-eng)": 0.28, + "Tatoeba (yue-eng)": 0.98, + "Tatoeba (zsm-eng)": 35.71 } ] }, "Classification": { "accuracy": [ { - "Model": "norbert3-large", - "AngryTweetsClassification": 49.04, - "DKHateClassification": 62.71, - "DanishPoliticalCommentsClassification": 33.53, - "LccSentimentClassification": 46.93, - "MassiveIntentClassification (da)": 45.98, - "MassiveIntentClassification (nb)": 47.42, - "MassiveIntentClassification (sv)": 48.47, - "MassiveScenarioClassification (da)": 50.51, - "MassiveScenarioClassification (nb)": 54.25, - "MassiveScenarioClassification (sv)": 50.6, - "NoRecClassification": 50.46, - "NordicLangClassification": 84.25, - "NorwegianParliament": 58.85, - "ScalaDaClassification": 60.72, - "ScalaNbClassification": 66.79 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "norbert3-large" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "norbert3-large" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "norbert3-large" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "norbert3-large" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "norbert3-large" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "norbert3-large" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "norbert3-large" - } - ] - } - }, - "sentence-bert-swedish-cased": { - "BitextMining": { - "f1": [ - { - "Model": "sentence-bert-swedish-cased", - "BornholmBitextMining": 14.08 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "sentence-bert-swedish-cased", - "AngryTweetsClassification": 44.46, - "DKHateClassification": 59.36, - "DanishPoliticalCommentsClassification": 28.32, - "LccSentimentClassification": 47.2, - "MassiveIntentClassification (da)": 42.84, - "MassiveIntentClassification (nb)": 42.74, - "MassiveIntentClassification (sv)": 69.11, - "MassiveScenarioClassification (da)": 49.64, - "MassiveScenarioClassification (nb)": 49.49, - "MassiveScenarioClassification (sv)": 75.96, - "NoRecClassification": 43.53, - "NordicLangClassification": 51.45, - "NorwegianParliament": 55.74, - "ScalaDaClassification": 50.12, - "ScalaNbClassification": 50.34 + "Model": "sentence-t5-xl", + "AmazonCounterfactualClassification (de)": 67.01, + "AmazonCounterfactualClassification (en)": 76.01, + "AmazonCounterfactualClassification (en-ext)": 77.29, + "AmazonCounterfactualClassification (ja)": 45.61, + "AmazonPolarityClassification": 93.17, + "AmazonReviewsClassification (de)": 44.05, + "AmazonReviewsClassification (en)": 48.18, + "AmazonReviewsClassification (es)": 45.01, + "AmazonReviewsClassification (fr)": 43.52, + "AmazonReviewsClassification (ja)": 22.23, + "AmazonReviewsClassification (zh)": 21.88, + "Banking77Classification": 80.88, + "EmotionClassification": 51.95, + "ImdbClassification": 87.54, + "MTOPDomainClassification (de)": 83.28, + "MTOPDomainClassification (en)": 90.73, + "MTOPDomainClassification (es)": 85.32, + "MTOPDomainClassification (fr)": 85.14, + "MTOPDomainClassification (hi)": 20.85, + 
"MTOPDomainClassification (th)": 15.62, + "MTOPIntentClassification (de)": 54.65, + "MTOPIntentClassification (en)": 68.15, + "MTOPIntentClassification (es)": 57.38, + "MTOPIntentClassification (fr)": 54.39, + "MTOPIntentClassification (hi)": 3.28, + "MTOPIntentClassification (th)": 5.08, + "MasakhaNEWSClassification (fra)": 80.09, + "MassiveIntentClassification (af)": 40.17, + "MassiveIntentClassification (am)": 2.18, + "MassiveIntentClassification (ar)": 4.18, + "MassiveIntentClassification (az)": 30.02, + "MassiveIntentClassification (bn)": 2.6, + "MassiveIntentClassification (cy)": 29.15, + "MassiveIntentClassification (da)": 47.69, + "MassiveIntentClassification (de)": 57.43, + "MassiveIntentClassification (el)": 9.96, + "MassiveIntentClassification (en)": 72.09, + "MassiveIntentClassification (es)": 57.97, + "MassiveIntentClassification (fa)": 3.6, + "MassiveIntentClassification (fi)": 34.02, + "MassiveIntentClassification (fr)": 60.99, + "MassiveIntentClassification (he)": 2.51, + "MassiveIntentClassification (hi)": 3.02, + "MassiveIntentClassification (hu)": 31.66, + "MassiveIntentClassification (hy)": 3.32, + "MassiveIntentClassification (id)": 41.53, + "MassiveIntentClassification (is)": 30.25, + "MassiveIntentClassification (it)": 56.57, + "MassiveIntentClassification (ja)": 3.5, + "MassiveIntentClassification (jv)": 31.67, + "MassiveIntentClassification (ka)": 2.79, + "MassiveIntentClassification (km)": 5.43, + "MassiveIntentClassification (kn)": 2.79, + "MassiveIntentClassification (ko)": 2.67, + "MassiveIntentClassification (lv)": 34.25, + "MassiveIntentClassification (ml)": 2.98, + "MassiveIntentClassification (mn)": 20.99, + "MassiveIntentClassification (ms)": 37.43, + "MassiveIntentClassification (my)": 4.02, + "MassiveIntentClassification (nb)": 45.91, + "MassiveIntentClassification (nl)": 50.51, + "MassiveIntentClassification (pl)": 43.95, + "MassiveIntentClassification (pt)": 57.95, + "MassiveIntentClassification (ro)": 49.37, + "MassiveIntentClassification (ru)": 33.46, + "MassiveIntentClassification (sl)": 36.33, + "MassiveIntentClassification (sq)": 37.65, + "MassiveIntentClassification (sv)": 46.35, + "MassiveIntentClassification (sw)": 30.6, + "MassiveIntentClassification (ta)": 1.79, + "MassiveIntentClassification (te)": 2.26, + "MassiveIntentClassification (th)": 4.02, + "MassiveIntentClassification (tl)": 38.92, + "MassiveIntentClassification (tr)": 32.05, + "MassiveIntentClassification (ur)": 2.7, + "MassiveIntentClassification (vi)": 21.47, + "MassiveIntentClassification (zh-CN)": 0.59, + "MassiveIntentClassification (zh-TW)": 3.24, + "MassiveScenarioClassification (af)": 50.81, + "MassiveScenarioClassification (am)": 6.95, + "MassiveScenarioClassification (ar)": 12.32, + "MassiveScenarioClassification (az)": 38.79, + "MassiveScenarioClassification (bn)": 8.0, + "MassiveScenarioClassification (cy)": 33.91, + "MassiveScenarioClassification (da)": 55.79, + "MassiveScenarioClassification (de)": 65.33, + "MassiveScenarioClassification (el)": 16.89, + "MassiveScenarioClassification (en)": 73.26, + "MassiveScenarioClassification (es)": 62.52, + "MassiveScenarioClassification (fa)": 6.08, + "MassiveScenarioClassification (fi)": 43.34, + "MassiveScenarioClassification (fr)": 66.42, + "MassiveScenarioClassification (he)": 7.55, + "MassiveScenarioClassification (hi)": 7.44, + "MassiveScenarioClassification (hu)": 40.85, + "MassiveScenarioClassification (hy)": 9.25, + "MassiveScenarioClassification (id)": 51.92, + "MassiveScenarioClassification (is)": 40.09, + 
"MassiveScenarioClassification (it)": 62.94, + "MassiveScenarioClassification (ja)": 7.9, + "MassiveScenarioClassification (jv)": 41.33, + "MassiveScenarioClassification (ka)": 7.76, + "MassiveScenarioClassification (km)": 9.19, + "MassiveScenarioClassification (kn)": 8.36, + "MassiveScenarioClassification (ko)": 6.13, + "MassiveScenarioClassification (lv)": 40.7, + "MassiveScenarioClassification (ml)": 6.98, + "MassiveScenarioClassification (mn)": 27.0, + "MassiveScenarioClassification (ms)": 46.9, + "MassiveScenarioClassification (my)": 9.55, + "MassiveScenarioClassification (nb)": 53.43, + "MassiveScenarioClassification (nl)": 59.65, + "MassiveScenarioClassification (pl)": 49.87, + "MassiveScenarioClassification (pt)": 62.18, + "MassiveScenarioClassification (ro)": 58.22, + "MassiveScenarioClassification (ru)": 40.73, + "MassiveScenarioClassification (sl)": 43.66, + "MassiveScenarioClassification (sq)": 49.25, + "MassiveScenarioClassification (sv)": 57.17, + "MassiveScenarioClassification (sw)": 40.55, + "MassiveScenarioClassification (ta)": 7.46, + "MassiveScenarioClassification (te)": 7.03, + "MassiveScenarioClassification (th)": 8.52, + "MassiveScenarioClassification (tl)": 51.74, + "MassiveScenarioClassification (tr)": 43.01, + "MassiveScenarioClassification (ur)": 9.61, + "MassiveScenarioClassification (vi)": 28.91, + "MassiveScenarioClassification (zh-CN)": 5.86, + "MassiveScenarioClassification (zh-TW)": 7.14, + "ToxicConversationsClassification": 70.95, + "TweetSentimentExtractionClassification": 61.21 } ] }, "Clustering": { "v_measure": [ { - "Model": "sentence-bert-swedish-cased" + "Model": "sentence-t5-xl", + "AlloProfClusteringP2P": 60.37, + "AlloProfClusteringS2S": 40.76, + "ArxivClusteringP2P": 41.62, + "ArxivClusteringS2S": 31.17, + "BiorxivClusteringP2P": 36.43, + "BiorxivClusteringS2S": 26.47, + "HALClusteringS2S": 20.28, + "MLSUMClusteringP2P": 41.61, + "MLSUMClusteringS2S": 33.6, + "MasakhaNEWSClusteringP2P (fra)": 62.82, + "MasakhaNEWSClusteringS2S (fra)": 31.74, + "MedrxivClusteringP2P": 32.3, + "MedrxivClusteringS2S": 26.93, + "RedditClustering": 57.03, + "RedditClusteringP2P": 62.34, + "StackExchangeClustering": 67.13, + "StackExchangeClusteringP2P": 34.79, + "TwentyNewsgroupsClustering": 49.53 } ] }, "PairClassification": { "ap": [ { - "Model": "sentence-bert-swedish-cased" + "Model": "sentence-t5-xl", + "OpusparcusPC (fr)": 92.48, + "PawsXPairClassification (fr)": 62.52, + "SprintDuplicateQuestions": 91.44, + "TwitterSemEval2015": 80.89, + "TwitterURLCorpus": 85.86 } ] }, "Reranking": { "map": [ { - "Model": "sentence-bert-swedish-cased" + "Model": "sentence-t5-xl", + "AlloprofReranking": 63.3, + "AskUbuntuDupQuestions": 62.86, + "MindSmallReranking": 29.77, + "SciDocsRR": 75.16, + "StackOverflowDupQuestions": 51.05, + "SyntecReranking": 83.07 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "sentence-bert-swedish-cased" + "Model": "sentence-t5-xl", + "AlloprofRetrieval": 40.38, + "ArguAna": 39.4, + "BSARDRetrieval": 0.14, + "CQADupstackRetrieval": 40.78, + "ClimateFEVER": 10.61, + "DBPedia": 33.65, + "FEVER": 36.12, + "FiQA2018": 44.71, + "HotpotQA": 37.17, + "MSMARCO": 25.17, + "MintakaRetrieval (fr)": 31.54, + "NFCorpus": 33.18, + "NQ": 46.29, + "QuoraRetrieval": 85.85, + "SCIDOCS": 15.97, + "SciFact": 50.91, + "SyntecRetrieval": 74.24, + "TRECCOVID": 54.77, + "Touche2020": 22.51, + "XPQARetrieval (fr)": 52.14 } ] }, "STS": { "spearman": [ { - "Model": "sentence-bert-swedish-cased" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": 
"sentence-bert-swedish-cased" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "sentence-bert-swedish-cased" - } - ] - } - }, - "universal-sentence-encoder-multilingual-3": { - "BitextMining": { - "f1": [ - { - "Model": "universal-sentence-encoder-multilingual-3" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "universal-sentence-encoder-multilingual-3", - "AmazonReviewsClassification (fr)": 33.51, - "MTOPDomainClassification (fr)": 85.5, - "MTOPIntentClassification (fr)": 53.98, - "MasakhaNEWSClassification (fra)": 82.06, - "MassiveIntentClassification (fr)": 61.19, - "MassiveScenarioClassification (fr)": 70.22 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "universal-sentence-encoder-multilingual-3", - "AlloProfClusteringP2P": 56.9, - "AlloProfClusteringS2S": 37.84, - "HALClusteringS2S": 18.95, - "MLSUMClusteringP2P": 43.9, - "MLSUMClusteringS2S": 35.5, - "MasakhaNEWSClusteringP2P (fra)": 60.57, - "MasakhaNEWSClusteringS2S (fra)": 40.31 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "universal-sentence-encoder-multilingual-3", - "OpusparcusPC (fr)": 91.46, - "PawsXPairClassification (fr)": 52.39 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "universal-sentence-encoder-multilingual-3", - "AlloprofReranking": 56.23, - "SyntecReranking": 73.85 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "universal-sentence-encoder-multilingual-3", - "AlloprofRetrieval": 35.27, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 26.12, - "SyntecRetrieval": 69.82, - "XPQARetrieval (fr)": 59.59 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "universal-sentence-encoder-multilingual-3", - "SICKFr": 71.37, - "STS22 (fr)": 77.91, - "STSBenchmarkMultilingualSTS (fr)": 75.48 + "Model": "sentence-t5-xl", + "BIOSSES": 73.12, + "SICK-R": 79.98, + "SICKFr": 75.08, + "STS12": 79.02, + "STS13": 88.8, + "STS14": 84.33, + "STS15": 88.89, + "STS16": 85.31, + "STS17 (ar-ar)": 11.13, + "STS17 (en-ar)": -3.93, + "STS17 (en-de)": 79.04, + "STS17 (en-en)": 88.91, + "STS17 (en-tr)": 13.61, + "STS17 (es-en)": 71.72, + "STS17 (es-es)": 83.42, + "STS17 (fr-en)": 71.38, + "STS17 (it-en)": 69.5, + "STS17 (ko-ko)": 9.61, + "STS17 (nl-en)": 66.12, + "STS22 (ar)": 29.6, + "STS22 (de)": 47.72, + "STS22 (de-en)": 49.64, + "STS22 (de-fr)": 62.21, + "STS22 (de-pl)": 34.34, + "STS22 (en)": 64.32, + "STS22 (es)": 58.16, + "STS22 (es-en)": 69.15, + "STS22 (es-it)": 65.26, + "STS22 (fr)": 77.49, + "STS22 (fr-pl)": 50.71, + "STS22 (it)": 66.91, + "STS22 (pl)": 27.04, + "STS22 (pl-en)": 58.85, + "STS22 (ru)": 26.63, + "STS22 (tr)": 43.36, + "STS22 (zh)": 33.55, + "STS22 (zh-en)": 29.0, + "STSBenchmark": 83.93, + "STSBenchmarkMultilingualSTS (fr)": 79.42 } ] }, "Summarization": { "spearman": [ { - "Model": "universal-sentence-encoder-multilingual-3", - "SummEvalFr": 28.21 + "Model": "sentence-t5-xl", + "SummEval": 29.91, + "SummEvalFr": 31.59 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "universal-sentence-encoder-multilingual-3" + "Model": "sentence-t5-xl" } ] } }, - "text-similarity-ada-001": { + "multilingual-e5-small": { "BitextMining": { "f1": [ { - "Model": "text-similarity-ada-001" + "Model": "multilingual-e5-small", + "BornholmBitextMining (dan-Latn)": 37.15, + "BornholmBitextMining": 43.89, + "Tatoeba (swh-Latn_eng-Latn)": 65.43, + "Tatoeba (jpn-Jpan_eng-Latn)": 77.43, + "Tatoeba (tuk-Latn_eng-Latn)": 16.99, + "Tatoeba (lat-Latn_eng-Latn)": 37.76, + "Tatoeba (mal-Mlym_eng-Latn)": 94.78, + "Tatoeba (ast-Latn_eng-Latn)": 
62.81, + "Tatoeba (est-Latn_eng-Latn)": 56.47, + "Tatoeba (cym-Latn_eng-Latn)": 62.3, + "Tatoeba (pol-Latn_eng-Latn)": 88.85, + "Tatoeba (ukr-Cyrl_eng-Latn)": 82.98, + "Tatoeba (ido-Latn_eng-Latn)": 70.07, + "Tatoeba (zsm-Latn_eng-Latn)": 91.37, + "Tatoeba (bul-Cyrl_eng-Latn)": 85.47, + "Tatoeba (dsb-Latn_eng-Latn)": 29.87, + "Tatoeba (tha-Thai_eng-Latn)": 90.88, + "Tatoeba (arz-Arab_eng-Latn)": 53.35, + "Tatoeba (cbk-Latn_eng-Latn)": 55.36, + "Tatoeba (pms-Latn_eng-Latn)": 35.47, + "Tatoeba (ber-Tfng_eng-Latn)": 18.22, + "Tatoeba (slk-Latn_eng-Latn)": 79.86, + "Tatoeba (ang-Latn_eng-Latn)": 30.3, + "Tatoeba (ind-Latn_eng-Latn)": 88.28, + "Tatoeba (cha-Latn_eng-Latn)": 24.88, + "Tatoeba (slv-Latn_eng-Latn)": 73.93, + "Tatoeba (kab-Latn_eng-Latn)": 18.06, + "Tatoeba (ina-Latn_eng-Latn)": 86.39, + "Tatoeba (lfn-Latn_eng-Latn)": 51.46, + "Tatoeba (hye-Armn_eng-Latn)": 83.81, + "Tatoeba (war-Latn_eng-Latn)": 39.14, + "Tatoeba (dtp-Latn_eng-Latn)": 6.42, + "Tatoeba (nds-Latn_eng-Latn)": 52.46, + "Tatoeba (urd-Arab_eng-Latn)": 85.07, + "Tatoeba (rus-Cyrl_eng-Latn)": 89.77, + "Tatoeba (fao-Latn_eng-Latn)": 56.57, + "Tatoeba (cat-Latn_eng-Latn)": 79.3, + "Tatoeba (gla-Latn_eng-Latn)": 35.96, + "Tatoeba (kur-Latn_eng-Latn)": 39.99, + "Tatoeba (cor-Latn_eng-Latn)": 5.24, + "Tatoeba (nov-Latn_eng-Latn)": 64.2, + "Tatoeba (max-Deva_eng-Latn)": 48.29, + "Tatoeba (nno-Latn_eng-Latn)": 70.29, + "Tatoeba (kor-Hang_eng-Latn)": 73.74, + "Tatoeba (vie-Latn_eng-Latn)": 89.03, + "Tatoeba (tur-Latn_eng-Latn)": 88.42, + "Tatoeba (spa-Latn_eng-Latn)": 93.01, + "Tatoeba (gsw-Latn_eng-Latn)": 40.13, + "Tatoeba (yid-Hebr_eng-Latn)": 65.9, + "Tatoeba (orv-Cyrl_eng-Latn)": 14.89, + "Tatoeba (wuu-Hans_eng-Latn)": 67.3, + "Tatoeba (heb-Hebr_eng-Latn)": 73.68, + "Tatoeba (arq-Arab_eng-Latn)": 23.62, + "Tatoeba (nld-Latn_eng-Latn)": 91.87, + "Tatoeba (kaz-Cyrl_eng-Latn)": 70.57, + "Tatoeba (mon-Cyrl_eng-Latn)": 77.7, + "Tatoeba (fin-Latn_eng-Latn)": 70.23, + "Tatoeba (hrv-Latn_eng-Latn)": 84.42, + "Tatoeba (fra-Latn_eng-Latn)": 90.51, + "Tatoeba (khm-Khmr_eng-Latn)": 44.34, + "Tatoeba (amh-Ethi_eng-Latn)": 74.11, + "Tatoeba (eus-Latn_eng-Latn)": 50.9, + "Tatoeba (lvs-Latn_eng-Latn)": 61.84, + "Tatoeba (pes-Arab_eng-Latn)": 85.51, + "Tatoeba (tzl-Latn_eng-Latn)": 34.83, + "Tatoeba (oci-Latn_eng-Latn)": 38.27, + "Tatoeba (ell-Grek_eng-Latn)": 86.81, + "Tatoeba (tgl-Latn_eng-Latn)": 77.54, + "Tatoeba (uig-Arab_eng-Latn)": 60.59, + "Tatoeba (ben-Beng_eng-Latn)": 81.4, + "Tatoeba (uzb-Latn_eng-Latn)": 59.11, + "Tatoeba (epo-Latn_eng-Latn)": 88.96, + "Tatoeba (sqi-Latn_eng-Latn)": 86.21, + "Tatoeba (kzj-Latn_eng-Latn)": 6.56, + "Tatoeba (mkd-Cyrl_eng-Latn)": 63.74, + "Tatoeba (bre-Latn_eng-Latn)": 7.09, + "Tatoeba (dan-Latn_eng-Latn)": 86.38, + "Tatoeba (mhr-Cyrl_eng-Latn)": 5.58, + "Tatoeba (csb-Latn_eng-Latn)": 26.23, + "Tatoeba (xho-Latn_eng-Latn)": 63.2, + "Tatoeba (swe-Latn_eng-Latn)": 87.46, + "Tatoeba (tat-Cyrl_eng-Latn)": 66.8, + "Tatoeba (srp-Cyrl_eng-Latn)": 83.06, + "Tatoeba (cmn-Hans_eng-Latn)": 89.85, + "Tatoeba (ces-Latn_eng-Latn)": 80.99, + "Tatoeba (bel-Cyrl_eng-Latn)": 80.89, + "Tatoeba (yue-Hant_eng-Latn)": 69.33, + "Tatoeba (lit-Latn_eng-Latn)": 59.95, + "Tatoeba (tel-Telu_eng-Latn)": 86.82, + "Tatoeba (nob-Latn_eng-Latn)": 90.18, + "Tatoeba (mar-Deva_eng-Latn)": 85.94, + "Tatoeba (ara-Arab_eng-Latn)": 76.09, + "Tatoeba (swg-Latn_eng-Latn)": 44.0, + "Tatoeba (bos-Latn_eng-Latn)": 81.15, + "Tatoeba (pam-Latn_eng-Latn)": 5.76, + "Tatoeba (fry-Latn_eng-Latn)": 49.05, + "Tatoeba (hun-Latn_eng-Latn)": 74.44, + 
"Tatoeba (ron-Latn_eng-Latn)": 85.68, + "Tatoeba (afr-Latn_eng-Latn)": 85.17, + "Tatoeba (isl-Latn_eng-Latn)": 62.32, + "Tatoeba (aze-Latn_eng-Latn)": 80.79, + "Tatoeba (hsb-Latn_eng-Latn)": 36.49, + "Tatoeba (tam-Taml_eng-Latn)": 82.82, + "Tatoeba (ceb-Latn_eng-Latn)": 42.35, + "Tatoeba (jav-Latn_eng-Latn)": 53.39, + "Tatoeba (glg-Latn_eng-Latn)": 79.65, + "Tatoeba (por-Latn_eng-Latn)": 89.63, + "Tatoeba (awa-Deva_eng-Latn)": 74.55, + "Tatoeba (hin-Deva_eng-Latn)": 92.36, + "Tatoeba (ita-Latn_eng-Latn)": 88.54, + "Tatoeba (deu-Latn_eng-Latn)": 97.22, + "Tatoeba (gle-Latn_eng-Latn)": 56.32, + "Tatoeba (kat-Geor_eng-Latn)": 77.6, + "Tatoeba (ile-Latn_eng-Latn)": 70.31 } ] }, "Classification": { "accuracy": [ { - "Model": "text-similarity-ada-001", - "AmazonCounterfactualClassification (en)": 76.4, - "AmazonPolarityClassification": 92.83, - "AmazonReviewsClassification (en)": 47.45, - "Banking77Classification": 68.04, - "EmotionClassification": 50.33, - "ImdbClassification": 89.38, - "MTOPDomainClassification (en)": 89.89, - "MTOPIntentClassification (en)": 64.8, - "MassiveIntentClassification (en)": 65.17, - "MassiveScenarioClassification (en)": 67.67, - "ToxicConversationsClassification": 70.0, - "TweetSentimentExtractionClassification": 63.35 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-similarity-ada-001", - "ArxivClusteringP2P": 41.49, - "ArxivClusteringS2S": 28.47, - "BiorxivClusteringP2P": 36.86, - "BiorxivClusteringS2S": 27.55, - "MedrxivClusteringP2P": 31.09, - "MedrxivClusteringS2S": 26.5, - "RedditClustering": 42.47, - "RedditClusteringP2P": 58.1, - "StackExchangeClustering": 53.52, - "StackExchangeClusteringP2P": 30.43, - "TwentyNewsgroupsClustering": 36.26 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-similarity-ada-001", - "SprintDuplicateQuestions": 77.85, - "TwitterSemEval2015": 69.04, - "TwitterURLCorpus": 83.69 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-similarity-ada-001", - "AskUbuntuDupQuestions": 53.49, - "MindSmallReranking": 30.71, - "SciDocsRR": 71.04, - "StackOverflowDupQuestions": 40.85 + "Model": "multilingual-e5-small", + "AllegroReviews (pol-Latn)": 37.33, + "AllegroReviews": 37.42, + "AmazonCounterfactualClassification (en-ext)": 73.07, + "AmazonCounterfactualClassification (en)": 71.87, + "AmazonCounterfactualClassification (deu-Latn)": 71.72, + "AmazonCounterfactualClassification (jpn-Jpan)": 61.46, + "AmazonPolarityClassification": 88.61, + "AmazonReviewsClassification (en)": 45.75, + "AmazonReviewsClassification (deu-Latn)": 41.07, + "AmazonReviewsClassification (spa-Latn)": 41.37, + "AmazonReviewsClassification (fra-Latn)": 39.47, + "AmazonReviewsClassification (jpn-Jpan)": 38.55, + "AmazonReviewsClassification (cmn-Hans)": 38.31, + "AmazonReviewsClassification (fr)": 39.68, + "AngryTweetsClassification (dan-Latn)": 56.27, + "AngryTweetsClassification": 53.57, + "Banking77Classification": 70.44, + "CBD (pol-Latn)": 63.33, + "CBD": 63.25, + "DKHateClassification": 60.73, + "DanishPoliticalCommentsClassification (dan-Latn)": 34.82, + "DanishPoliticalCommentsClassification": 34.38, + "EmotionClassification": 42.86, + "GeoreviewClassification (rus-Cyrl)": 44.66, + "HeadlineClassification (rus-Cyrl)": 73.94, + "IFlyTek (cmn-Hans)": 40.74, + "IFlyTek": 47.35, + "ImdbClassification": 79.57, + "InappropriatenessClassification (rus-Cyrl)": 59.16, + "JDReview (cmn-Hans)": 78.37, + "JDReview": 79.34, + "KinopoiskClassification (rus-Cyrl)": 49.96, + "LccSentimentClassification (dan-Latn)": 58.6, + 
"LccSentimentClassification": 57.87, + "MTOPDomainClassification (en)": 88.99, + "MTOPDomainClassification (deu-Latn)": 86.15, + "MTOPDomainClassification (spa-Latn)": 85.53, + "MTOPDomainClassification (fra-Latn)": 81.5, + "MTOPDomainClassification (hin-Deva)": 84.07, + "MTOPDomainClassification (tha-Thai)": 83.16, + "MTOPDomainClassification (fr)": 81.2, + "MTOPIntentClassification (en)": 56.69, + "MTOPIntentClassification (deu-Latn)": 55.88, + "MTOPIntentClassification (spa-Latn)": 53.15, + "MTOPIntentClassification (fra-Latn)": 44.35, + "MTOPIntentClassification (hin-Deva)": 52.26, + "MTOPIntentClassification (tha-Thai)": 54.61, + "MTOPIntentClassification (fr)": 46.01, + "MasakhaNEWSClassification (amh-Ethi)": 84.28, + "MasakhaNEWSClassification (eng)": 75.61, + "MasakhaNEWSClassification (fra-Latn)": 74.67, + "MasakhaNEWSClassification (hau-Latn)": 73.08, + "MasakhaNEWSClassification (ibo-Latn)": 63.9, + "MasakhaNEWSClassification (lin-Latn)": 73.37, + "MasakhaNEWSClassification (lug-Latn)": 67.89, + "MasakhaNEWSClassification (orm-Ethi)": 68.77, + "MasakhaNEWSClassification (pcm-Latn)": 90.79, + "MasakhaNEWSClassification (run-Latn)": 75.4, + "MasakhaNEWSClassification (sna-Latn)": 82.76, + "MasakhaNEWSClassification (som-Latn)": 59.8, + "MasakhaNEWSClassification (swa-Latn)": 69.85, + "MasakhaNEWSClassification (tir-Ethi)": 68.01, + "MasakhaNEWSClassification (xho-Latn)": 72.22, + "MasakhaNEWSClassification (yor-Latn)": 73.84, + "MasakhaNEWSClassification (fra)": 77.65, + "MassiveIntentClassification (ben-Beng)": 50.68, + "MassiveIntentClassification (tur-Latn)": 56.88, + "MassiveIntentClassification (ind-Latn)": 56.2, + "MassiveIntentClassification (khm-Khmr)": 33.45, + "MassiveIntentClassification (en)": 63.87, + "MassiveIntentClassification (mal-Mlym)": 52.81, + "MassiveIntentClassification (pol-Latn)": 57.33, + "MassiveIntentClassification (lav-Latn)": 44.93, + "MassiveIntentClassification (isl-Latn)": 41.53, + "MassiveIntentClassification (sqi-Latn)": 48.68, + "MassiveIntentClassification (amh-Ethi)": 43.52, + "MassiveIntentClassification (cmo-Hans)": 62.04, + "MassiveIntentClassification (nld-Latn)": 59.27, + "MassiveIntentClassification (deu-Latn)": 55.52, + "MassiveIntentClassification (nob-Latn)": 55.36, + "MassiveIntentClassification (cmo-Hant)": 53.75, + "MassiveIntentClassification (urd-Arab)": 50.51, + "MassiveIntentClassification (slv-Latn)": 47.71, + "MassiveIntentClassification (hun-Latn)": 53.21, + "MassiveIntentClassification (jpn-Jpan)": 61.58, + "MassiveIntentClassification (swa-Latn)": 44.84, + "MassiveIntentClassification (fra-Latn)": 57.9, + "MassiveIntentClassification (spa-Latn)": 59.19, + "MassiveIntentClassification (mon-Cyrl)": 47.38, + "MassiveIntentClassification (dan-Latn)": 56.12, + "MassiveIntentClassification (msa-Latn)": 50.8, + "MassiveIntentClassification (aze-Latn)": 49.32, + "MassiveIntentClassification (fas-Arab)": 57.73, + "MassiveIntentClassification (kan-Knda)": 47.85, + "MassiveIntentClassification (kor-Kore)": 57.12, + "MassiveIntentClassification (tha-Thai)": 56.26, + "MassiveIntentClassification (heb-Hebr)": 51.11, + "MassiveIntentClassification (hin-Deva)": 55.69, + "MassiveIntentClassification (ara-Arab)": 47.78, + "MassiveIntentClassification (por-Latn)": 60.12, + "MassiveIntentClassification (vie-Latn)": 56.19, + "MassiveIntentClassification (hye-Armn)": 47.89, + "MassiveIntentClassification (ita-Latn)": 58.8, + "MassiveIntentClassification (ell-Grek)": 54.14, + "MassiveIntentClassification (cym-Latn)": 36.62, + 
"MassiveIntentClassification (tel-Telu)": 48.85, + "MassiveIntentClassification (kat-Geor)": 39.52, + "MassiveIntentClassification (swe-Latn)": 58.2, + "MassiveIntentClassification (tam-Taml)": 47.65, + "MassiveIntentClassification (fin-Latn)": 55.14, + "MassiveIntentClassification (tgl-Latn)": 48.7, + "MassiveIntentClassification (ron-Latn)": 52.82, + "MassiveIntentClassification (jav-Latn)": 42.96, + "MassiveIntentClassification (rus-Cyrl)": 58.43, + "MassiveIntentClassification (afr-Latn)": 48.74, + "MassiveIntentClassification (mya-Mymr)": 45.64, + "MassiveIntentClassification (da)": 54.63, + "MassiveIntentClassification (nb)": 53.96, + "MassiveIntentClassification (sv)": 56.6, + "MassiveIntentClassification (pl)": 57.4, + "MassiveScenarioClassification (nld-Latn)": 67.01, + "MassiveScenarioClassification (tur-Latn)": 62.14, + "MassiveScenarioClassification (cym-Latn)": 44.63, + "MassiveScenarioClassification (jav-Latn)": 51.39, + "MassiveScenarioClassification (hin-Deva)": 62.22, + "MassiveScenarioClassification (fra-Latn)": 63.9, + "MassiveScenarioClassification (cmo-Hans)": 68.96, + "MassiveScenarioClassification (kan-Knda)": 52.73, + "MassiveScenarioClassification (isl-Latn)": 49.66, + "MassiveScenarioClassification (jpn-Jpan)": 67.75, + "MassiveScenarioClassification (mal-Mlym)": 60.31, + "MassiveScenarioClassification (pol-Latn)": 64.27, + "MassiveScenarioClassification (mya-Mymr)": 51.07, + "MassiveScenarioClassification (slv-Latn)": 54.05, + "MassiveScenarioClassification (rus-Cyrl)": 63.89, + "MassiveScenarioClassification (urd-Arab)": 55.91, + "MassiveScenarioClassification (fas-Arab)": 63.32, + "MassiveScenarioClassification (fin-Latn)": 61.89, + "MassiveScenarioClassification (kat-Geor)": 44.96, + "MassiveScenarioClassification (sqi-Latn)": 56.15, + "MassiveScenarioClassification (en)": 69.28, + "MassiveScenarioClassification (hun-Latn)": 61.93, + "MassiveScenarioClassification (aze-Latn)": 53.27, + "MassiveScenarioClassification (heb-Hebr)": 59.22, + "MassiveScenarioClassification (kor-Kore)": 65.7, + "MassiveScenarioClassification (nob-Latn)": 61.96, + "MassiveScenarioClassification (dan-Latn)": 64.03, + "MassiveScenarioClassification (cmo-Hant)": 61.15, + "MassiveScenarioClassification (ron-Latn)": 60.0, + "MassiveScenarioClassification (amh-Ethi)": 50.53, + "MassiveScenarioClassification (spa-Latn)": 64.43, + "MassiveScenarioClassification (afr-Latn)": 58.0, + "MassiveScenarioClassification (lav-Latn)": 51.0, + "MassiveScenarioClassification (deu-Latn)": 65.88, + "MassiveScenarioClassification (ita-Latn)": 64.03, + "MassiveScenarioClassification (tha-Thai)": 65.72, + "MassiveScenarioClassification (msa-Latn)": 59.18, + "MassiveScenarioClassification (tam-Taml)": 52.74, + "MassiveScenarioClassification (ara-Arab)": 54.56, + "MassiveScenarioClassification (tgl-Latn)": 55.3, + "MassiveScenarioClassification (por-Latn)": 62.75, + "MassiveScenarioClassification (swe-Latn)": 67.33, + "MassiveScenarioClassification (tel-Telu)": 54.86, + "MassiveScenarioClassification (khm-Khmr)": 39.01, + "MassiveScenarioClassification (swa-Latn)": 52.42, + "MassiveScenarioClassification (vie-Latn)": 62.67, + "MassiveScenarioClassification (ind-Latn)": 62.0, + "MassiveScenarioClassification (hye-Armn)": 52.93, + "MassiveScenarioClassification (ben-Beng)": 57.38, + "MassiveScenarioClassification (mon-Cyrl)": 52.41, + "MassiveScenarioClassification (ell-Grek)": 62.29, + "MassiveScenarioClassification (da)": 62.34, + "MassiveScenarioClassification (nb)": 59.9, + "MassiveScenarioClassification 
(sv)": 65.54, + "MassiveScenarioClassification (pl)": 64.25, + "MultilingualSentiment (cmn-Hans)": 66.0, + "MultilingualSentiment": 64.74, + "NoRecClassification (nob-Latn)": 50.08, + "NoRecClassification": 53.96, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 72.15, + "NordicLangClassification": 75.15, + "NorwegianParliament": 60.15, + "OnlineShopping (cmn-Hans)": 88.7, + "OnlineShopping": 88.73, + "PAC (pol-Latn)": 70.48, + "PAC": 70.55, + "PolEmo2.0-IN (pol-Latn)": 67.31, + "PolEmo2.0-IN": 67.35, + "PolEmo2.0-OUT (pol-Latn)": 39.17, + "PolEmo2.0-OUT": 39.13, + "RuReviewsClassification (rus-Cyrl)": 61.18, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.99, + "RuSciBenchOECDClassification (rus-Cyrl)": 41.72, + "ScalaDaClassification": 50.3, + "ScalaNbClassification": 50.06, + "TNews (cmn-Hans)": 46.6, + "TNews": 48.38, + "ToxicConversationsClassification": 63.59, + "TweetSentimentExtractionClassification": 62.79, + "Waimai (cmn-Hans)": 84.15, + "Waimai": 83.9 } ] }, - "Retrieval": { - "ndcg_at_10": [ + "Clustering": { + "v_measure": [ { - "Model": "text-similarity-ada-001", - "ArguAna": 39.65, - "CQADupstackRetrieval": 10.17, - "ClimateFEVER": 2.83, - "DBPedia": 3.48, - "FEVER": 4.45, - "FiQA2018": 7.54, - "HotpotQA": 12.6, - "MSMARCO": 10.53, - "NFCorpus": 20.59, - "NQ": 2.02, - "QuoraRetrieval": 82.18, - "SCIDOCS": 6.28, - "SciFact": 45.46, - "TRECCOVID": 24.56, - "Touche2020": 3.1 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-similarity-ada-001", - "BIOSSES": 78.04, - "SICK-R": 77.48, - "STS12": 72.3, - "STS13": 81.49, - "STS14": 74.74, - "STS15": 84.28, - "STS16": 82.06, - "STS17 (en-en)": 87.08, - "STS22 (en)": 64.71, - "STSBenchmark": 83.78 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-similarity-ada-001", - "SummEval": 26.94 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-similarity-ada-001" - } - ] - } - }, - "rubert-base-cased-sentence": { - "BitextMining": { - "f1": [ - { - "Model": "rubert-base-cased-sentence", - "Tatoeba (rus-Cyrl_eng-Latn)": 20.26 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "rubert-base-cased-sentence", - "GeoreviewClassification (rus-Cyrl)": 38.05, - "HeadlineClassification (rus-Cyrl)": 67.64, - "InappropriatenessClassification (rus-Cyrl)": 58.27, - "KinopoiskClassification (rus-Cyrl)": 45.86, - "MassiveIntentClassification (rus-Cyrl)": 49.1, - "MassiveScenarioClassification (rus-Cyrl)": 51.91, - "RuReviewsClassification (rus-Cyrl)": 58.34, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.18, - "RuSciBenchOECDClassification (rus-Cyrl)": 40.11 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "rubert-base-cased-sentence", - "GeoreviewClusteringP2P (rus-Cyrl)": 41.82, - "MLSUMClusteringP2P (rus-Cyrl)": 43.71, - "MLSUMClusteringS2S (rus-Cyrl)": 45.94, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 46.29, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.28 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "rubert-base-cased-sentence", - "OpusparcusPC (rus-Cyrl)": 81.52, - "TERRa (rus-Cyrl)": 59.12 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "rubert-base-cased-sentence", - "RuBQReranking (rus-Cyrl)": 39.89 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "rubert-base-cased-sentence", - "RiaNewsRetrieval (rus-Cyrl)": 6.72, - "RuBQRetrieval (rus-Cyrl)": 12.63 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "rubert-base-cased-sentence", - "RUParaPhraserSTS (rus-Cyrl)": 66.24, - 
"RuSTSBenchmarkSTS (rus-Cyrl)": 66.03, - "STS22 (rus-Cyrl)": 51.27, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 66.71 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "rubert-base-cased-sentence" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "rubert-base-cased-sentence" - } - ] - } - }, - "gelectra-large": { - "BitextMining": { - "f1": [ - { - "Model": "gelectra-large" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gelectra-large" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gelectra-large", - "BlurbsClusteringP2P": 13.96, - "BlurbsClusteringS2S": 7.57, - "TenKGnadClusteringP2P": 11.49, - "TenKGnadClusteringS2S": 3.91 + "Model": "multilingual-e5-small", + "8TagsClustering": 23.92, + "AlloProfClusteringP2P": 60.89, + "AlloProfClusteringS2S": 32.52, + "BiorxivClusteringP2P": 35.84, + "BiorxivClusteringS2S": 27.35, + "CLSClusteringP2P": 39.14, + "CLSClusteringS2S": 37.79, + "GeoreviewClusteringP2P (rus-Cyrl)": 58.57, + "HALClusteringS2S": 18.95, + "MLSUMClusteringP2P (rus-Cyrl)": 39.69, + "MLSUMClusteringP2P": 43.2, + "MLSUMClusteringS2S (rus-Cyrl)": 39.9, + "MLSUMClusteringS2S": 37.61, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 66.2, + "MasakhaNEWSClusteringP2P (eng)": 50.08, + "MasakhaNEWSClusteringP2P (fra-Latn)": 56.32, + "MasakhaNEWSClusteringP2P (hau-Latn)": 53.63, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 49.19, + "MasakhaNEWSClusteringP2P (lin-Latn)": 55.06, + "MasakhaNEWSClusteringP2P (lug-Latn)": 59.97, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 32.72, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 62.22, + "MasakhaNEWSClusteringP2P (run-Latn)": 57.52, + "MasakhaNEWSClusteringP2P (sna-Latn)": 45.11, + "MasakhaNEWSClusteringP2P (som-Latn)": 42.39, + "MasakhaNEWSClusteringP2P (swa-Latn)": 23.77, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 57.68, + "MasakhaNEWSClusteringP2P (xho-Latn)": 39.96, + "MasakhaNEWSClusteringP2P (yor-Latn)": 26.56, + "MasakhaNEWSClusteringP2P (fra)": 40.12, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 55.48, + "MasakhaNEWSClusteringS2S (eng)": 37.79, + "MasakhaNEWSClusteringS2S (fra-Latn)": 35.8, + "MasakhaNEWSClusteringS2S (hau-Latn)": 20.22, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.67, + "MasakhaNEWSClusteringS2S (lin-Latn)": 41.12, + "MasakhaNEWSClusteringS2S (lug-Latn)": 48.63, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 29.16, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 65.36, + "MasakhaNEWSClusteringS2S (run-Latn)": 45.5, + "MasakhaNEWSClusteringS2S (sna-Latn)": 47.61, + "MasakhaNEWSClusteringS2S (som-Latn)": 28.59, + "MasakhaNEWSClusteringS2S (swa-Latn)": 13.91, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51, + "MasakhaNEWSClusteringS2S (xho-Latn)": 37.26, + "MasakhaNEWSClusteringS2S (yor-Latn)": 23.38, + "MasakhaNEWSClusteringS2S (fra)": 39.22, + "MedrxivClusteringP2P": 30.72, + "MedrxivClusteringS2S": 27.0, + "RedditClustering": 40.12, + "RedditClusteringP2P": 59.49, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.1, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.29, + "StackExchangeClustering": 53.32, + "StackExchangeClusteringP2P": 31.87, + "ThuNewsClusteringP2P": 55.18, + "ThuNewsClusteringS2S": 48.93, + "TwentyNewsgroupsClustering": 33.67 } ] }, "PairClassification": { "ap": [ { - "Model": "gelectra-large" + "Model": "multilingual-e5-small", + "CDSC-E (pol-Latn)": 69.69, + "CDSC-E": 69.7, + "Cmnli": 72.12, + "Ocnli": 60.77, + "OpusparcusPC (deu-Latn)": 94.9, + "OpusparcusPC (en)": 98.42, + "OpusparcusPC (fin-Latn)": 88.29, + "OpusparcusPC (fra-Latn)": 91.77, + "OpusparcusPC (rus-Cyrl)": 
84.79, + "OpusparcusPC (swe-Latn)": 91.07, + "OpusparcusPC (fr)": 92.52, + "PPC": 86.72, + "PSC (pol-Latn)": 99.23, + "PSC": 99.24, + "PawsXPairClassification (deu-Latn)": 52.13, + "PawsXPairClassification (en)": 53.91, + "PawsXPairClassification (spa-Latn)": 51.39, + "PawsXPairClassification (fra-Latn)": 52.69, + "PawsXPairClassification (jpn-Hira)": 48.24, + "PawsXPairClassification (kor-Hang)": 49.95, + "PawsXPairClassification (cmn-Hans)": 54.01, + "PawsXPairClassification (fr)": 55.68, + "SICK-E-PL (pol-Latn)": 66.35, + "SICK-E-PL": 66.34, + "SprintDuplicateQuestions": 92.18, + "TERRa (rus-Cyrl)": 55.14, + "TwitterSemEval2015": 70.75, + "TwitterURLCorpus": 85.03 } ] }, "Reranking": { "map": [ { - "Model": "gelectra-large" + "Model": "multilingual-e5-small", + "AlloprofReranking (fra-Latn)": 64.41, + "AlloprofReranking": 56.17, + "AskUbuntuDupQuestions": 56.42, + "CMedQAv1": 63.44, + "CMedQAv2": 62.41, + "MMarcoReranking (cmn-Hans)": 29.98, + "MMarcoReranking": 24.33, + "MindSmallReranking": 29.96, + "RuBQReranking (rus-Cyrl)": 71.46, + "SciDocsRR": 78.26, + "StackOverflowDupQuestions": 46.97, + "SyntecReranking (fra-Latn)": 81.22, + "SyntecReranking": 86.7, + "T2Reranking (cmn-Hans)": 65.72, + "T2Reranking": 65.24 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "gelectra-large" + "Model": "multilingual-e5-small", + "AILACasedocs": 23.43, + "AILAStatutes": 19.01, + "ARCChallenge": 7.14, + "AlloprofRetrieval (fra-Latn)": 27.38, + "AlloprofRetrieval": 27.01, + "AlphaNLI": 13.0, + "ArguAna": 39.09, + "ArguAna-PL (pol-Latn)": 37.49, + "ArguAna-PL": 37.43, + "BSARDRetrieval (fra-Latn)": 14.54, + "BSARDRetrieval": 0.0, + "CmedqaRetrieval (cmn-Hans)": 24.36, + "CmedqaRetrieval": 24.38, + "CovidRetrieval (cmn-Hans)": 72.82, + "CovidRetrieval": 72.82, + "DBPedia-PL": 29.27, + "DuRetrieval (cmn-Hans)": 81.36, + "DuRetrieval": 81.35, + "EcomRetrieval (cmn-Hans)": 53.53, + "EcomRetrieval": 53.56, + "FiQA-PL (pol-Latn)": 22.02, + "FiQA-PL": 22.03, + "FiQA2018": 33.13, + "GerDaLIRSmall (deu-Latn)": 14.81, + "HellaSwag": 23.73, + "HotpotQA-PL": 60.15, + "LEMBNarrativeQARetrieval": 22.6, + "LEMBNeedleRetrieval": 30.75, + "LEMBPasskeyRetrieval": 38.25, + "LEMBQMSumRetrieval": 21.51, + "LEMBSummScreenFDRetrieval": 62.75, + "LEMBWikimQARetrieval": 57.13, + "LeCaRDv2 (zho-Hans)": 61.58, + "LegalBenchConsumerContractsQA": 66.98, + "LegalBenchCorporateLobbying": 89.47, + "LegalQuAD (deu-Latn)": 47.8, + "LegalSummarization": 55.76, + "MMarcoRetrieval (cmn-Hans)": 73.17, + "MMarcoRetrieval": 73.17, + "MSMARCO-PL": 26.94, + "MedicalRetrieval (cmn-Hans)": 44.84, + "MedicalRetrieval": 44.84, + "MintakaRetrieval (ara-Arab)": 21.22, + "MintakaRetrieval (deu-Latn)": 25.6, + "MintakaRetrieval (spa-Latn)": 26.4, + "MintakaRetrieval (fra-Latn)": 25.0, + "MintakaRetrieval (hin-Deva)": 21.1, + "MintakaRetrieval (ita-Latn)": 26.25, + "MintakaRetrieval (jpn-Hira)": 20.69, + "MintakaRetrieval (por-Latn)": 24.44, + "MintakaRetrieval (fr)": 22.53, + "NFCorpus": 31.0, + "NFCorpus-PL (pol-Latn)": 26.5, + "NFCorpus-PL": 26.48, + "NQ-PL": 40.46, + "PIQA": 21.08, + "Quail": 2.38, + "Quora-PL": 78.7, + "RARbCode": 46.96, + "RARbMath": 63.91, + "RiaNewsRetrieval (rus-Cyrl)": 70.01, + "RuBQRetrieval (rus-Cyrl)": 68.53, + "SCIDOCS": 13.9, + "SCIDOCS-PL (pol-Latn)": 11.59, + "SCIDOCS-PL": 11.6, + "SIQA": 2.57, + "SciFact": 67.7, + "SciFact-PL (pol-Latn)": 62.76, + "SciFact-PL": 62.76, + "SpartQA": 5.43, + "SyntecRetrieval (fra-Latn)": 73.46, + "SyntecRetrieval": 75.76, + "T2Retrieval (cmn-Hans)": 71.36, + "T2Retrieval": 71.39, + 
"TRECCOVID": 72.57, + "TRECCOVID-PL (pol-Latn)": 70.92, + "TRECCOVID-PL": 70.92, + "TempReasonL1": 0.8, + "TempReasonL2Fact": 36.76, + "TempReasonL2Pure": 0.62, + "TempReasonL3Fact": 32.42, + "TempReasonL3Pure": 6.36, + "Touche2020": 21.16, + "VideoRetrieval (cmn-Hans)": 58.06, + "VideoRetrieval": 58.09, + "WinoGrande": 37.46, + "XPQARetrieval (ara-Arab_ara-Arab)": 39.93, + "XPQARetrieval (eng-Latn_ara-Arab)": 18.09, + "XPQARetrieval (ara-Arab_eng-Latn)": 31.64, + "XPQARetrieval (deu-Latn_deu-Latn)": 69.43, + "XPQARetrieval (eng-Latn_deu-Latn)": 25.14, + "XPQARetrieval (deu-Latn_eng-Latn)": 52.36, + "XPQARetrieval (spa-Latn_spa-Latn)": 55.71, + "XPQARetrieval (eng-Latn_spa-Latn)": 22.5, + "XPQARetrieval (spa-Latn_eng-Latn)": 42.4, + "XPQARetrieval (fra-Latn_fra-Latn)": 57.17, + "XPQARetrieval (eng-Latn_fra-Latn)": 27.69, + "XPQARetrieval (fra-Latn_eng-Latn)": 47.46, + "XPQARetrieval (hin-Deva_hin-Deva)": 68.15, + "XPQARetrieval (eng-Latn_hin-Deva)": 25.82, + "XPQARetrieval (hin-Deva_eng-Latn)": 63.79, + "XPQARetrieval (ita-Latn_ita-Latn)": 67.71, + "XPQARetrieval (eng-Latn_ita-Latn)": 22.97, + "XPQARetrieval (ita-Latn_eng-Latn)": 46.61, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 69.49, + "XPQARetrieval (eng-Latn_jpn-Hira)": 25.08, + "XPQARetrieval (jpn-Hira_eng-Latn)": 54.6, + "XPQARetrieval (kor-Hang_kor-Hang)": 33.0, + "XPQARetrieval (eng-Latn_kor-Hang)": 22.49, + "XPQARetrieval (kor-Hang_eng-Latn)": 23.02, + "XPQARetrieval (pol-Latn_pol-Latn)": 43.37, + "XPQARetrieval (eng-Latn_pol-Latn)": 19.89, + "XPQARetrieval (pol-Latn_eng-Latn)": 28.72, + "XPQARetrieval (por-Latn_por-Latn)": 41.8, + "XPQARetrieval (eng-Latn_por-Latn)": 15.79, + "XPQARetrieval (por-Latn_eng-Latn)": 33.74, + "XPQARetrieval (tam-Taml_tam-Taml)": 31.65, + "XPQARetrieval (eng-Latn_tam-Taml)": 13.18, + "XPQARetrieval (tam-Taml_eng-Latn)": 26.44, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 63.98, + "XPQARetrieval (eng-Latn_cmn-Hans)": 16.52, + "XPQARetrieval (cmn-Hans_eng-Latn)": 45.32, + "XPQARetrieval (fr)": 57.47 } ] }, "STS": { "spearman": [ { - "Model": "gelectra-large" + "Model": "multilingual-e5-small", + "AFQMC (cmn-Hans)": 25.21, + "AFQMC": 25.21, + "ATEC (cmn-Hans)": 35.14, + "ATEC": 35.14, + "BIOSSES": 82.46, + "BQ (cmn-Hans)": 43.27, + "BQ": 43.27, + "CDSC-R (pol-Latn)": 90.27, + "CDSC-R": 90.27, + "LCQMC (cmn-Hans)": 72.7, + "LCQMC": 72.7, + "PAWSX (cmn-Hans)": 11.0, + "PAWSX": 11.01, + "QBQTC": 30.25, + "RUParaPhraserSTS (rus-Cyrl)": 70.46, + "RuSTSBenchmarkSTS (rus-Cyrl)": 78.08, + "SICK-R": 77.51, + "SICK-R-PL (pol-Latn)": 69.45, + "SICK-R-PL": 69.46, + "SICKFr (fra-Latn)": 74.67, + "SICKFr": 75.62, + "STS12": 76.56, + "STS13": 76.97, + "STS14": 75.52, + "STS15": 87.12, + "STS16": 83.63, + "STS17 (ita-Latn_eng-Latn)": 77.31, + "STS17 (en-en)": 86.42, + "STS17 (eng-Latn_ara-Arab)": 57.39, + "STS17 (eng-Latn_tur-Latn)": 55.93, + "STS17 (spa-Latn_eng-Latn)": 72.43, + "STS17 (kor-Hang)": 78.87, + "STS17 (spa-Latn)": 84.83, + "STS17 (eng-Latn_deu-Latn)": 76.82, + "STS17 (fra-Latn_eng-Latn)": 72.28, + "STS17 (nld-Latn_eng-Latn)": 75.43, + "STS17 (ara-Arab)": 73.0, + "STS22 (ita-Latn)": 76.53, + "STS22 (en)": 61.25, + "STS22 (pol-Latn_eng-Latn)": 72.69, + "STS22 (cmn-Hans)": 66.85, + "STS22 (fra-Latn)": 76.58, + "STS22 (deu-Latn)": 53.45, + "STS22 (fra-Latn_pol-Latn)": 84.52, + "STS22 (deu-Latn_pol-Latn)": 28.24, + "STS22 (spa-Latn_eng-Latn)": 74.2, + "STS22 (spa-Latn)": 66.86, + "STS22 (rus-Cyrl)": 59.9, + "STS22 (spa-Latn_ita-Latn)": 71.74, + "STS22 (pol-Latn)": 35.78, + "STS22 (tur-Latn)": 63.69, + "STS22 (ara-Arab)": 
56.65, + "STS22 (cmn-Hans_eng-Latn)": 65.32, + "STS22 (deu-Latn_eng-Latn)": 56.07, + "STS22 (deu-Latn_fra-Latn)": 60.62, + "STS22 (pl)": 35.8, + "STSB (cmn-Hans)": 77.73, + "STSB": 77.73, + "STSBenchmark": 84.11, + "STSBenchmarkMultilingualSTS (en)": 84.11, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 78.49, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.24, + "STSBenchmarkMultilingualSTS (spa-Latn)": 80.31, + "STSBenchmarkMultilingualSTS (deu-Latn)": 79.17, + "STSBenchmarkMultilingualSTS (fra-Latn)": 79.2, + "STSBenchmarkMultilingualSTS (nld-Latn)": 76.04, + "STSBenchmarkMultilingualSTS (pol-Latn)": 72.61, + "STSBenchmarkMultilingualSTS (por-Latn)": 77.39, + "STSBenchmarkMultilingualSTS (ita-Latn)": 78.21, + "STSBenchmarkMultilingualSTS (fr)": 79.32 } ] }, "Summarization": { "spearman": [ { - "Model": "gelectra-large" + "Model": "multilingual-e5-small", + "SummEval": 30.04, + "SummEvalFr (fra-Latn)": 31.14, + "SummEvalFr": 31.85 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "gelectra-large" + "Model": "multilingual-e5-small" } ] } }, - "gtr-t5-base": { + "text-search-curie-001": { "BitextMining": { "f1": [ { - "Model": "gtr-t5-base" + "Model": "text-search-curie-001" } ] }, "Classification": { "accuracy": [ { - "Model": "gtr-t5-base", - "AmazonCounterfactualClassification (en)": 69.33, - "AmazonPolarityClassification": 67.82, - "AmazonReviewsClassification (en)": 38.48, - "Banking77Classification": 79.26, - "EmotionClassification": 42.2, - "ImdbClassification": 65.99, - "MTOPDomainClassification (en)": 92.42, - "MTOPIntentClassification (en)": 62.44, - "MassiveIntentClassification (en)": 67.05, - "MassiveScenarioClassification (en)": 75.4, - "ToxicConversationsClassification": 66.6, - "TweetSentimentExtractionClassification": 56.02 + "Model": "text-search-curie-001" } ] }, "Clustering": { "v_measure": [ { - "Model": "gtr-t5-base", - "ArxivClusteringP2P": 35.49, - "ArxivClusteringS2S": 27.18, - "BiorxivClusteringP2P": 27.66, - "BiorxivClusteringS2S": 23.25, - "MedrxivClusteringP2P": 27.57, - "MedrxivClusteringS2S": 25.13, - "RedditClustering": 56.13, - "RedditClusteringP2P": 58.53, - "StackExchangeClustering": 64.21, - "StackExchangeClusteringP2P": 33.01, - "TwentyNewsgroupsClustering": 46.72 + "Model": "text-search-curie-001" } ] }, "PairClassification": { "ap": [ { - "Model": "gtr-t5-base", - "SprintDuplicateQuestions": 94.55, - "TwitterSemEval2015": 72.23, - "TwitterURLCorpus": 84.77 + "Model": "text-search-curie-001" } ] }, "Reranking": { "map": [ { - "Model": "gtr-t5-base", - "AskUbuntuDupQuestions": 60.86, - "MindSmallReranking": 31.33, - "SciDocsRR": 73.71, - "StackOverflowDupQuestions": 51.01 + "Model": "text-search-curie-001" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "gtr-t5-base", - "ArguAna": 50.83, - "CQADupstackRetrieval": 34.55, - "ClimateFEVER": 24.88, - "DBPedia": 35.24, - "FEVER": 68.93, - "FiQA2018": 35.15, - "HotpotQA": 54.93, - "MSMARCO": 41.16, - "NFCorpus": 30.22, - "NQ": 50.47, - "QuoraRetrieval": 87.98, - "SCIDOCS": 14.0, - "SciFact": 59.74, - "TRECCOVID": 56.05, - "Touche2020": 25.89 + "Model": "text-search-curie-001", + "ArguAna": 46.98, + "ClimateFEVER": 19.4, + "FEVER": 75.6, + "FiQA2018": 45.21, + "HotpotQA": 64.8, + "NFCorpus": 38.01, + "QuoraRetrieval": 67.7, + "SCIDOCS": 17.74, + "SciFact": 74.35, + "TRECCOVID": 56.14, + "Touche2020": 30.9 } ] }, "STS": { "spearman": [ { - "Model": "gtr-t5-base", - "BIOSSES": 79.0, - "SICK-R": 71.45, - "STS12": 68.59, - "STS13": 79.09, - "STS14": 74.64, - "STS15": 84.85, - "STS16": 81.57, - "STS17 
(en-en)": 85.8, - "STS22 (en)": 66.17, - "STSBenchmark": 79.58 + "Model": "text-search-curie-001" } ] }, "Summarization": { "spearman": [ { - "Model": "gtr-t5-base", - "SummEval": 29.67 + "Model": "text-search-curie-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "gtr-t5-base" + "Model": "text-search-curie-001" } ] } }, - "bge-base-en-v1.5-instruct": { + "rubert-tiny": { "BitextMining": { "f1": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny" } ] }, "Classification": { "accuracy": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny", + "GeoreviewClassification (rus-Cyrl)": 33.45, + "HeadlineClassification (rus-Cyrl)": 57.65, + "InappropriatenessClassification (rus-Cyrl)": 54.5, + "KinopoiskClassification (rus-Cyrl)": 41.36, + "MassiveIntentClassification (rus-Cyrl)": 50.1, + "MassiveScenarioClassification (rus-Cyrl)": 52.15, + "RuReviewsClassification (rus-Cyrl)": 49.56, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 35.71, + "RuSciBenchOECDClassification (rus-Cyrl)": 26.51 } ] }, "Clustering": { "v_measure": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny", + "GeoreviewClusteringP2P (rus-Cyrl)": 34.4, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 29.89, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 27.98 } ] }, "PairClassification": { "ap": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny", + "TERRa (rus-Cyrl)": 51.06 } ] }, "Reranking": { "map": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny", + "RuBQReranking (rus-Cyrl)": 35.44 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bge-base-en-v1.5-instruct", - "ARCChallenge": 8.85, - "AlphaNLI": 4.13, - "HellaSwag": 24.03, - "PIQA": 23.03, - "Quail": 1.25, - "RARbCode": 46.32, - "RARbMath": 45.62, - "SIQA": 0.24, - "SpartQA": 2.67, - "TempReasonL1": 0.8, - "TempReasonL2Fact": 16.56, - "TempReasonL2Pure": 1.33, - "TempReasonL3Fact": 12.68, - "TempReasonL3Pure": 5.08, - "WinoGrande": 10.27 + "Model": "rubert-tiny", + "RiaNewsRetrieval (rus-Cyrl)": 0.79, + "RuBQRetrieval (rus-Cyrl)": 3.24 } ] }, "STS": { "spearman": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny", + "RUParaPhraserSTS (rus-Cyrl)": 53.41, + "RuSTSBenchmarkSTS (rus-Cyrl)": 58.16, + "STS22 (rus-Cyrl)": 47.88 } ] }, "Summarization": { "spearman": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bge-base-en-v1.5-instruct" + "Model": "rubert-tiny" } ] } }, - "all-MiniLM-L12-v2": { + "udever-bloom-1b1": { "BitextMining": { "f1": [ { - "Model": "all-MiniLM-L12-v2", - "BornholmBitextMining (dan-Latn)": 35.25, - "Tatoeba (spa-Latn_eng-Latn)": 11.26, - "Tatoeba (bos-Latn_eng-Latn)": 7.05, - "Tatoeba (xho-Latn_eng-Latn)": 3.66, - "Tatoeba (fry-Latn_eng-Latn)": 14.53, - "Tatoeba (tur-Latn_eng-Latn)": 3.69, - "Tatoeba (fao-Latn_eng-Latn)": 5.92, - "Tatoeba (vie-Latn_eng-Latn)": 5.06, - "Tatoeba (ind-Latn_eng-Latn)": 5.3, - "Tatoeba (pol-Latn_eng-Latn)": 4.29, - "Tatoeba (swe-Latn_eng-Latn)": 7.31, - "Tatoeba (ita-Latn_eng-Latn)": 12.57, - "Tatoeba (dtp-Latn_eng-Latn)": 3.31, - "Tatoeba (ron-Latn_eng-Latn)": 8.77, - "Tatoeba (isl-Latn_eng-Latn)": 3.44, - "Tatoeba (hrv-Latn_eng-Latn)": 5.68, - "Tatoeba (cha-Latn_eng-Latn)": 13.07, - "Tatoeba (cor-Latn_eng-Latn)": 2.47, - "Tatoeba (cym-Latn_eng-Latn)": 5.13, - "Tatoeba (jpn-Jpan_eng-Latn)": 2.18, - "Tatoeba (lfn-Latn_eng-Latn)": 7.52, - "Tatoeba (hun-Latn_eng-Latn)": 3.93, - "Tatoeba (lat-Latn_eng-Latn)": 7.14, - "Tatoeba (tgl-Latn_eng-Latn)": 
3.34, - "Tatoeba (kur-Latn_eng-Latn)": 7.3, - "Tatoeba (war-Latn_eng-Latn)": 6.18, - "Tatoeba (kab-Latn_eng-Latn)": 0.91, - "Tatoeba (kaz-Cyrl_eng-Latn)": 0.82, - "Tatoeba (slv-Latn_eng-Latn)": 4.52, - "Tatoeba (nds-Latn_eng-Latn)": 11.35, - "Tatoeba (pam-Latn_eng-Latn)": 4.73, - "Tatoeba (bul-Cyrl_eng-Latn)": 0.23, - "Tatoeba (ces-Latn_eng-Latn)": 4.2, - "Tatoeba (nno-Latn_eng-Latn)": 7.45, - "Tatoeba (ben-Beng_eng-Latn)": 0.02, - "Tatoeba (amh-Ethi_eng-Latn)": 0.01, - "Tatoeba (lit-Latn_eng-Latn)": 1.56, - "Tatoeba (pes-Arab_eng-Latn)": 0.3, - "Tatoeba (jav-Latn_eng-Latn)": 3.5, - "Tatoeba (mal-Mlym_eng-Latn)": 0.24, - "Tatoeba (lvs-Latn_eng-Latn)": 3.45, - "Tatoeba (gsw-Latn_eng-Latn)": 9.9, - "Tatoeba (fra-Latn_eng-Latn)": 17.53, - "Tatoeba (orv-Cyrl_eng-Latn)": 0.15, - "Tatoeba (kat-Geor_eng-Latn)": 0.45, - "Tatoeba (awa-Deva_eng-Latn)": 0.44, - "Tatoeba (epo-Latn_eng-Latn)": 8.5, - "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0, - "Tatoeba (dan-Latn_eng-Latn)": 10.21, - "Tatoeba (bel-Cyrl_eng-Latn)": 0.85, - "Tatoeba (nld-Latn_eng-Latn)": 12.56, - "Tatoeba (mkd-Cyrl_eng-Latn)": 0.01, - "Tatoeba (mon-Cyrl_eng-Latn)": 0.06, - "Tatoeba (ast-Latn_eng-Latn)": 9.99, - "Tatoeba (cat-Latn_eng-Latn)": 11.79, - "Tatoeba (oci-Latn_eng-Latn)": 8.72, - "Tatoeba (khm-Khmr_eng-Latn)": 0.42, - "Tatoeba (urd-Arab_eng-Latn)": 0.0, - "Tatoeba (tzl-Latn_eng-Latn)": 6.87, - "Tatoeba (arq-Arab_eng-Latn)": 0.28, - "Tatoeba (uig-Arab_eng-Latn)": 0.4, - "Tatoeba (dsb-Latn_eng-Latn)": 3.06, - "Tatoeba (hsb-Latn_eng-Latn)": 2.89, - "Tatoeba (kzj-Latn_eng-Latn)": 3.64, - "Tatoeba (cbk-Latn_eng-Latn)": 9.76, - "Tatoeba (afr-Latn_eng-Latn)": 7.59, - "Tatoeba (gle-Latn_eng-Latn)": 3.08, - "Tatoeba (csb-Latn_eng-Latn)": 5.21, - "Tatoeba (mar-Deva_eng-Latn)": 0.04, - "Tatoeba (arz-Arab_eng-Latn)": 0.0, - "Tatoeba (tat-Cyrl_eng-Latn)": 0.75, - "Tatoeba (hin-Deva_eng-Latn)": 0.0, - "Tatoeba (ang-Latn_eng-Latn)": 14.63, - "Tatoeba (heb-Hebr_eng-Latn)": 0.3, - "Tatoeba (tuk-Latn_eng-Latn)": 2.66, - "Tatoeba (ile-Latn_eng-Latn)": 17.43, - "Tatoeba (zsm-Latn_eng-Latn)": 5.99, - "Tatoeba (kor-Hang_eng-Latn)": 0.9, - "Tatoeba (uzb-Latn_eng-Latn)": 2.2, - "Tatoeba (fin-Latn_eng-Latn)": 3.65, - "Tatoeba (hye-Armn_eng-Latn)": 0.5, - "Tatoeba (ukr-Cyrl_eng-Latn)": 0.57, - "Tatoeba (swh-Latn_eng-Latn)": 5.82, - "Tatoeba (gla-Latn_eng-Latn)": 2.58, - "Tatoeba (aze-Latn_eng-Latn)": 1.47, - "Tatoeba (ara-Arab_eng-Latn)": 0.43, - "Tatoeba (eus-Latn_eng-Latn)": 6.58, - "Tatoeba (deu-Latn_eng-Latn)": 13.89, - "Tatoeba (por-Latn_eng-Latn)": 11.36, - "Tatoeba (ber-Tfng_eng-Latn)": 4.72, - "Tatoeba (sqi-Latn_eng-Latn)": 5.86, - "Tatoeba (pms-Latn_eng-Latn)": 8.94, - "Tatoeba (ina-Latn_eng-Latn)": 25.36, - "Tatoeba (ido-Latn_eng-Latn)": 11.08, - "Tatoeba (slk-Latn_eng-Latn)": 4.2, - "Tatoeba (glg-Latn_eng-Latn)": 12.6, - "Tatoeba (nov-Latn_eng-Latn)": 19.45, - "Tatoeba (tel-Telu_eng-Latn)": 0.67, - "Tatoeba (tam-Taml_eng-Latn)": 0.33, - "Tatoeba (bre-Latn_eng-Latn)": 3.68, - "Tatoeba (tha-Thai_eng-Latn)": 0.67, - "Tatoeba (nob-Latn_eng-Latn)": 8.02, - "Tatoeba (est-Latn_eng-Latn)": 2.6, - "Tatoeba (wuu-Hans_eng-Latn)": 1.89, - "Tatoeba (swg-Latn_eng-Latn)": 11.9, - "Tatoeba (max-Deva_eng-Latn)": 8.4, - "Tatoeba (srp-Cyrl_eng-Latn)": 2.22, - "Tatoeba (yue-Hant_eng-Latn)": 1.89, - "Tatoeba (rus-Cyrl_eng-Latn)": 0.07, - "Tatoeba (ell-Grek_eng-Latn)": 0.2, - "Tatoeba (ceb-Latn_eng-Latn)": 3.95, - "Tatoeba (yid-Hebr_eng-Latn)": 0.19, - "Tatoeba (cmn-Hans_eng-Latn)": 2.45 + "Model": "udever-bloom-1b1" } ] }, "Classification": { "accuracy": [ { - "Model": 
"all-MiniLM-L12-v2", - "AllegroReviews (pol-Latn)": 23.85, - "AmazonCounterfactualClassification (en-ext)": 67.24, - "AmazonCounterfactualClassification (en)": 65.28, - "AmazonCounterfactualClassification (deu-Latn)": 57.13, - "AmazonCounterfactualClassification (jpn-Jpan)": 59.94, - "AmazonCounterfactualClassification (de)": 57.1, - "AmazonCounterfactualClassification (ja)": 59.91, - "AmazonPolarityClassification": 62.98, - "AmazonReviewsClassification (en)": 30.79, - "AmazonReviewsClassification (deu-Latn)": 25.92, - "AmazonReviewsClassification (spa-Latn)": 27.64, - "AmazonReviewsClassification (fra-Latn)": 27.53, - "AmazonReviewsClassification (jpn-Jpan)": 23.57, - "AmazonReviewsClassification (cmn-Hans)": 22.99, - "AmazonReviewsClassification (de)": 25.91, - "AmazonReviewsClassification (es)": 27.63, - "AmazonReviewsClassification (fr)": 27.54, - "AmazonReviewsClassification (ja)": 23.57, - "AmazonReviewsClassification (zh)": 22.99, - "AngryTweetsClassification (dan-Latn)": 42.87, - "Banking77Classification": 80.4, - "CBD (pol-Latn)": 48.46, - "DanishPoliticalCommentsClassification (dan-Latn)": 27.07, - "EmotionClassification": 41.17, - "GeoreviewClassification (rus-Cyrl)": 23.49, - "HeadlineClassification (rus-Cyrl)": 28.49, - "IFlyTek (cmn-Hans)": 15.31, - "ImdbClassification": 59.76, - "InappropriatenessClassification (rus-Cyrl)": 50.85, - "JDReview (cmn-Hans)": 59.57, - "KinopoiskClassification (rus-Cyrl)": 34.17, - "LccSentimentClassification (dan-Latn)": 41.93, - "MTOPDomainClassification (en)": 91.9, - "MTOPDomainClassification (deu-Latn)": 72.04, - "MTOPDomainClassification (spa-Latn)": 72.99, - "MTOPDomainClassification (fra-Latn)": 75.57, - "MTOPDomainClassification (hin-Deva)": 40.4, - "MTOPDomainClassification (tha-Thai)": 16.36, - "MTOPDomainClassification (de)": 72.04, - "MTOPDomainClassification (es)": 72.99, - "MTOPDomainClassification (fr)": 75.59, - "MTOPDomainClassification (hi)": 40.36, - "MTOPDomainClassification (th)": 17.1, - "MTOPIntentClassification (en)": 62.84, - "MTOPIntentClassification (deu-Latn)": 43.42, - "MTOPIntentClassification (spa-Latn)": 41.91, - "MTOPIntentClassification (fra-Latn)": 38.96, - "MTOPIntentClassification (hin-Deva)": 17.76, - "MTOPIntentClassification (tha-Thai)": 6.13, - "MTOPIntentClassification (de)": 43.41, - "MTOPIntentClassification (es)": 41.88, - "MTOPIntentClassification (fr)": 38.94, - "MTOPIntentClassification (hi)": 17.75, - "MTOPIntentClassification (th)": 5.63, - "MasakhaNEWSClassification (amh-Ethi)": 30.64, - "MasakhaNEWSClassification (eng)": 76.62, - "MasakhaNEWSClassification (fra-Latn)": 67.18, - "MasakhaNEWSClassification (hau-Latn)": 52.59, - "MasakhaNEWSClassification (ibo-Latn)": 54.26, - "MasakhaNEWSClassification (lin-Latn)": 62.23, - "MasakhaNEWSClassification (lug-Latn)": 47.62, - "MasakhaNEWSClassification (orm-Ethi)": 47.17, - "MasakhaNEWSClassification (pcm-Latn)": 91.77, - "MasakhaNEWSClassification (run-Latn)": 54.47, - "MasakhaNEWSClassification (sna-Latn)": 66.53, - "MasakhaNEWSClassification (som-Latn)": 40.27, - "MasakhaNEWSClassification (swa-Latn)": 47.77, - "MasakhaNEWSClassification (tir-Ethi)": 21.18, - "MasakhaNEWSClassification (xho-Latn)": 54.34, - "MasakhaNEWSClassification (yor-Latn)": 58.61, - "MasakhaNEWSClassification (fra)": 72.2, - "MassiveIntentClassification (jpn-Jpan)": 30.89, - "MassiveIntentClassification (khm-Khmr)": 4.99, - "MassiveIntentClassification (slv-Latn)": 38.48, - "MassiveIntentClassification (hye-Armn)": 8.69, - "MassiveIntentClassification (ita-Latn)": 43.16, - 
"MassiveIntentClassification (fin-Latn)": 39.19, - "MassiveIntentClassification (afr-Latn)": 38.84, - "MassiveIntentClassification (kor-Kore)": 19.97, - "MassiveIntentClassification (ben-Beng)": 13.7, - "MassiveIntentClassification (heb-Hebr)": 23.71, - "MassiveIntentClassification (dan-Latn)": 44.35, - "MassiveIntentClassification (fra-Latn)": 44.75, - "MassiveIntentClassification (pol-Latn)": 37.59, - "MassiveIntentClassification (por-Latn)": 45.08, - "MassiveIntentClassification (tha-Thai)": 10.46, - "MassiveIntentClassification (nob-Latn)": 41.79, - "MassiveIntentClassification (kat-Geor)": 9.17, - "MassiveIntentClassification (tgl-Latn)": 38.63, - "MassiveIntentClassification (swe-Latn)": 40.33, - "MassiveIntentClassification (hun-Latn)": 37.95, - "MassiveIntentClassification (cmo-Hant)": 22.38, - "MassiveIntentClassification (hin-Deva)": 18.0, - "MassiveIntentClassification (tur-Latn)": 35.93, - "MassiveIntentClassification (vie-Latn)": 37.35, - "MassiveIntentClassification (mal-Mlym)": 2.83, - "MassiveIntentClassification (aze-Latn)": 34.3, - "MassiveIntentClassification (amh-Ethi)": 2.45, - "MassiveIntentClassification (kan-Knda)": 3.07, - "MassiveIntentClassification (deu-Latn)": 44.12, - "MassiveIntentClassification (rus-Cyrl)": 26.29, - "MassiveIntentClassification (ara-Arab)": 21.02, - "MassiveIntentClassification (msa-Latn)": 36.16, - "MassiveIntentClassification (nld-Latn)": 41.77, - "MassiveIntentClassification (fas-Arab)": 23.56, - "MassiveIntentClassification (isl-Latn)": 35.17, - "MassiveIntentClassification (cym-Latn)": 35.65, - "MassiveIntentClassification (cmo-Hans)": 23.74, - "MassiveIntentClassification (ell-Grek)": 28.68, - "MassiveIntentClassification (spa-Latn)": 40.82, - "MassiveIntentClassification (ind-Latn)": 39.65, - "MassiveIntentClassification (jav-Latn)": 36.67, - "MassiveIntentClassification (mon-Cyrl)": 23.27, - "MassiveIntentClassification (mya-Mymr)": 4.36, - "MassiveIntentClassification (sqi-Latn)": 41.47, - "MassiveIntentClassification (tel-Telu)": 2.54, - "MassiveIntentClassification (en)": 67.15, - "MassiveIntentClassification (ron-Latn)": 41.64, - "MassiveIntentClassification (tam-Taml)": 13.12, - "MassiveIntentClassification (swa-Latn)": 35.26, - "MassiveIntentClassification (urd-Arab)": 16.26, - "MassiveIntentClassification (lav-Latn)": 38.54, - "MassiveIntentClassification (af)": 38.94, - "MassiveIntentClassification (am)": 2.45, - "MassiveIntentClassification (ar)": 20.94, - "MassiveIntentClassification (az)": 34.25, - "MassiveIntentClassification (bn)": 13.67, - "MassiveIntentClassification (cy)": 35.71, - "MassiveIntentClassification (da)": 44.43, - "MassiveIntentClassification (de)": 44.17, - "MassiveIntentClassification (el)": 28.7, - "MassiveIntentClassification (es)": 40.91, - "MassiveIntentClassification (fa)": 23.52, - "MassiveIntentClassification (fi)": 39.27, - "MassiveIntentClassification (fr)": 44.82, - "MassiveIntentClassification (he)": 23.65, - "MassiveIntentClassification (hi)": 17.98, - "MassiveIntentClassification (hu)": 38.0, - "MassiveIntentClassification (hy)": 8.69, - "MassiveIntentClassification (id)": 39.66, - "MassiveIntentClassification (is)": 35.14, - "MassiveIntentClassification (it)": 43.17, - "MassiveIntentClassification (ja)": 30.94, - "MassiveIntentClassification (jv)": 36.69, - "MassiveIntentClassification (ka)": 9.17, - "MassiveIntentClassification (km)": 4.99, - "MassiveIntentClassification (kn)": 3.08, - "MassiveIntentClassification (ko)": 19.97, - "MassiveIntentClassification (lv)": 38.61, - 
"MassiveIntentClassification (ml)": 2.85, - "MassiveIntentClassification (mn)": 23.25, - "MassiveIntentClassification (ms)": 36.21, - "MassiveIntentClassification (my)": 4.38, - "MassiveIntentClassification (nb)": 41.91, - "MassiveIntentClassification (nl)": 41.85, - "MassiveIntentClassification (pl)": 37.63, - "MassiveIntentClassification (pt)": 45.12, - "MassiveIntentClassification (ro)": 41.71, - "MassiveIntentClassification (ru)": 26.33, - "MassiveIntentClassification (sl)": 38.52, - "MassiveIntentClassification (sq)": 41.62, - "MassiveIntentClassification (sv)": 40.42, - "MassiveIntentClassification (sw)": 35.28, - "MassiveIntentClassification (ta)": 13.1, - "MassiveIntentClassification (te)": 2.56, - "MassiveIntentClassification (th)": 10.54, - "MassiveIntentClassification (tl)": 38.56, - "MassiveIntentClassification (tr)": 35.9, - "MassiveIntentClassification (ur)": 16.18, - "MassiveIntentClassification (vi)": 37.38, - "MassiveIntentClassification (zh-CN)": 23.74, - "MassiveIntentClassification (zh-TW)": 22.39, - "MassiveScenarioClassification (jav-Latn)": 44.54, - "MassiveScenarioClassification (aze-Latn)": 39.62, - "MassiveScenarioClassification (cmo-Hans)": 33.19, - "MassiveScenarioClassification (swa-Latn)": 43.18, - "MassiveScenarioClassification (fra-Latn)": 53.77, - "MassiveScenarioClassification (mon-Cyrl)": 29.01, - "MassiveScenarioClassification (kat-Geor)": 14.85, - "MassiveScenarioClassification (ben-Beng)": 18.98, - "MassiveScenarioClassification (ind-Latn)": 44.37, - "MassiveScenarioClassification (kor-Kore)": 25.72, - "MassiveScenarioClassification (lav-Latn)": 42.75, - "MassiveScenarioClassification (deu-Latn)": 52.08, - "MassiveScenarioClassification (hun-Latn)": 44.1, - "MassiveScenarioClassification (tam-Taml)": 19.4, - "MassiveScenarioClassification (afr-Latn)": 45.72, - "MassiveScenarioClassification (nob-Latn)": 47.35, - "MassiveScenarioClassification (urd-Arab)": 24.45, - "MassiveScenarioClassification (tha-Thai)": 18.32, - "MassiveScenarioClassification (ita-Latn)": 51.7, - "MassiveScenarioClassification (en)": 74.58, - "MassiveScenarioClassification (sqi-Latn)": 49.12, - "MassiveScenarioClassification (mya-Mymr)": 10.06, - "MassiveScenarioClassification (ara-Arab)": 27.66, - "MassiveScenarioClassification (tur-Latn)": 41.8, - "MassiveScenarioClassification (khm-Khmr)": 9.75, - "MassiveScenarioClassification (cym-Latn)": 41.43, - "MassiveScenarioClassification (cmo-Hant)": 31.14, - "MassiveScenarioClassification (hye-Armn)": 14.87, - "MassiveScenarioClassification (ell-Grek)": 35.55, - "MassiveScenarioClassification (ron-Latn)": 49.94, - "MassiveScenarioClassification (kan-Knda)": 8.32, - "MassiveScenarioClassification (jpn-Jpan)": 36.77, - "MassiveScenarioClassification (fin-Latn)": 45.8, - "MassiveScenarioClassification (swe-Latn)": 46.81, - "MassiveScenarioClassification (dan-Latn)": 49.5, - "MassiveScenarioClassification (msa-Latn)": 44.67, - "MassiveScenarioClassification (hin-Deva)": 23.03, - "MassiveScenarioClassification (tgl-Latn)": 48.29, - "MassiveScenarioClassification (pol-Latn)": 44.74, - "MassiveScenarioClassification (isl-Latn)": 43.11, - "MassiveScenarioClassification (por-Latn)": 53.0, - "MassiveScenarioClassification (slv-Latn)": 42.24, - "MassiveScenarioClassification (rus-Cyrl)": 28.77, - "MassiveScenarioClassification (tel-Telu)": 7.74, - "MassiveScenarioClassification (heb-Hebr)": 25.73, - "MassiveScenarioClassification (fas-Arab)": 29.0, - "MassiveScenarioClassification (vie-Latn)": 40.97, - "MassiveScenarioClassification (nld-Latn)": 
49.14, - "MassiveScenarioClassification (spa-Latn)": 50.73, - "MassiveScenarioClassification (mal-Mlym)": 7.25, - "MassiveScenarioClassification (amh-Ethi)": 7.41, - "MassiveScenarioClassification (af)": 45.71, - "MassiveScenarioClassification (am)": 7.41, - "MassiveScenarioClassification (ar)": 27.62, - "MassiveScenarioClassification (az)": 39.58, - "MassiveScenarioClassification (bn)": 18.98, - "MassiveScenarioClassification (cy)": 41.4, - "MassiveScenarioClassification (da)": 49.47, - "MassiveScenarioClassification (de)": 52.07, - "MassiveScenarioClassification (el)": 35.51, - "MassiveScenarioClassification (es)": 50.74, - "MassiveScenarioClassification (fa)": 29.0, - "MassiveScenarioClassification (fi)": 45.8, - "MassiveScenarioClassification (fr)": 53.76, - "MassiveScenarioClassification (he)": 25.68, - "MassiveScenarioClassification (hi)": 23.02, - "MassiveScenarioClassification (hu)": 44.09, - "MassiveScenarioClassification (hy)": 14.83, - "MassiveScenarioClassification (id)": 44.35, - "MassiveScenarioClassification (is)": 43.08, - "MassiveScenarioClassification (it)": 51.71, - "MassiveScenarioClassification (ja)": 36.75, - "MassiveScenarioClassification (jv)": 44.57, - "MassiveScenarioClassification (ka)": 14.84, - "MassiveScenarioClassification (km)": 9.75, - "MassiveScenarioClassification (kn)": 8.32, - "MassiveScenarioClassification (ko)": 25.72, - "MassiveScenarioClassification (lv)": 42.75, - "MassiveScenarioClassification (ml)": 7.25, - "MassiveScenarioClassification (mn)": 29.03, - "MassiveScenarioClassification (ms)": 44.65, - "MassiveScenarioClassification (my)": 10.07, - "MassiveScenarioClassification (nb)": 47.36, - "MassiveScenarioClassification (nl)": 49.15, - "MassiveScenarioClassification (pl)": 44.72, - "MassiveScenarioClassification (pt)": 53.0, - "MassiveScenarioClassification (ro)": 49.97, - "MassiveScenarioClassification (ru)": 28.75, - "MassiveScenarioClassification (sl)": 42.26, - "MassiveScenarioClassification (sq)": 49.14, - "MassiveScenarioClassification (sv)": 46.83, - "MassiveScenarioClassification (sw)": 43.18, - "MassiveScenarioClassification (ta)": 19.38, - "MassiveScenarioClassification (te)": 7.74, - "MassiveScenarioClassification (th)": 18.32, - "MassiveScenarioClassification (tl)": 48.31, - "MassiveScenarioClassification (tr)": 41.79, - "MassiveScenarioClassification (ur)": 24.46, - "MassiveScenarioClassification (vi)": 40.94, - "MassiveScenarioClassification (zh-CN)": 33.18, - "MassiveScenarioClassification (zh-TW)": 31.16, - "MultilingualSentiment (cmn-Hans)": 40.52, - "NoRecClassification (nob-Latn)": 37.73, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.17, - "OnlineShopping (cmn-Hans)": 58.65, - "PAC (pol-Latn)": 59.53, - "PolEmo2.0-IN (pol-Latn)": 38.32, - "PolEmo2.0-OUT (pol-Latn)": 22.98, - "RuReviewsClassification (rus-Cyrl)": 42.49, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.49, - "RuSciBenchOECDClassification (rus-Cyrl)": 8.31, - "TNews (cmn-Hans)": 20.37, - "ToxicConversationsClassification": 67.47, - "TweetSentimentExtractionClassification": 54.25, - "Waimai (cmn-Hans)": 63.48 + "Model": "udever-bloom-1b1", + "AmazonReviewsClassification (fr)": 35.12, + "MTOPDomainClassification (fr)": 69.24, + "MTOPIntentClassification (fr)": 51.25, + "MasakhaNEWSClassification (fra)": 80.83, + "MassiveIntentClassification (fr)": 43.21, + "MassiveScenarioClassification (fr)": 49.78 } ] }, "Clustering": { "v_measure": [ { - "Model": "all-MiniLM-L12-v2", - "AlloProfClusteringP2P": 46.03, - 
"AlloProfClusteringS2S": 31.83, - "ArxivClusteringP2P": 46.07, - "ArxivClusteringS2S": 37.5, - "BiorxivClusteringP2P": 36.99, - "BiorxivClusteringS2S": 33.21, - "GeoreviewClusteringP2P (rus-Cyrl)": 20.76, - "HALClusteringS2S": 19.58, - "MLSUMClusteringP2P": 34.35, - "MLSUMClusteringS2S": 29.3, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.5, - "MasakhaNEWSClusteringP2P (eng)": 55.86, - "MasakhaNEWSClusteringP2P (fra-Latn)": 42.72, - "MasakhaNEWSClusteringP2P (hau-Latn)": 26.61, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.26, - "MasakhaNEWSClusteringP2P (lin-Latn)": 54.52, - "MasakhaNEWSClusteringP2P (lug-Latn)": 43.87, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.87, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 74.42, - "MasakhaNEWSClusteringP2P (run-Latn)": 51.73, - "MasakhaNEWSClusteringP2P (sna-Latn)": 46.89, - "MasakhaNEWSClusteringP2P (som-Latn)": 31.17, - "MasakhaNEWSClusteringP2P (swa-Latn)": 23.72, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 44.08, - "MasakhaNEWSClusteringP2P (xho-Latn)": 26.97, - "MasakhaNEWSClusteringP2P (yor-Latn)": 32.51, - "MasakhaNEWSClusteringP2P (fra)": 42.72, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.11, - "MasakhaNEWSClusteringS2S (eng)": 40.71, - "MasakhaNEWSClusteringS2S (fra-Latn)": 32.47, - "MasakhaNEWSClusteringS2S (hau-Latn)": 20.63, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.33, - "MasakhaNEWSClusteringS2S (lin-Latn)": 54.52, - "MasakhaNEWSClusteringS2S (lug-Latn)": 51.42, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.84, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 70.72, - "MasakhaNEWSClusteringS2S (run-Latn)": 50.88, - "MasakhaNEWSClusteringS2S (sna-Latn)": 46.6, - "MasakhaNEWSClusteringS2S (som-Latn)": 29.87, - "MasakhaNEWSClusteringS2S (swa-Latn)": 10.82, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 43.63, - "MasakhaNEWSClusteringS2S (xho-Latn)": 24.55, - "MasakhaNEWSClusteringS2S (yor-Latn)": 32.85, - "MasakhaNEWSClusteringS2S (fra)": 32.47, - "MedrxivClusteringP2P": 34.25, - "MedrxivClusteringS2S": 32.24, - "RedditClustering": 51.18, - "RedditClusteringP2P": 54.8, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.65, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 10.19, - "StackExchangeClustering": 53.05, - "StackExchangeClusteringP2P": 33.13, - "TwentyNewsgroupsClustering": 47.47 + "Model": "udever-bloom-1b1", + "AlloProfClusteringP2P": 62.22, + "AlloProfClusteringS2S": 27.06, + "HALClusteringS2S": 13.86, + "MLSUMClusteringP2P": 44.11, + "MLSUMClusteringS2S": 30.47, + "MasakhaNEWSClusteringP2P (fra)": 40.2, + "MasakhaNEWSClusteringS2S (fra)": 27.35 } ] }, "PairClassification": { "ap": [ { - "Model": "all-MiniLM-L12-v2", - "CDSC-E (pol-Latn)": 49.04, - "OpusparcusPC (deu-Latn)": 91.2, - "OpusparcusPC (en)": 97.41, - "OpusparcusPC (fin-Latn)": 85.99, - "OpusparcusPC (fra-Latn)": 87.35, - "OpusparcusPC (rus-Cyrl)": 79.23, - "OpusparcusPC (swe-Latn)": 84.87, - "PSC (pol-Latn)": 87.92, - "PawsXPairClassification (deu-Latn)": 50.83, - "PawsXPairClassification (en)": 58.62, - "PawsXPairClassification (spa-Latn)": 52.08, - "PawsXPairClassification (fra-Latn)": 55.54, - "PawsXPairClassification (jpn-Hira)": 47.75, - "PawsXPairClassification (kor-Hang)": 49.59, - "PawsXPairClassification (cmn-Hans)": 52.8, - "SICK-E-PL (pol-Latn)": 49.63, - "SprintDuplicateQuestions": 92.45, - "TERRa (rus-Cyrl)": 46.4, - "TwitterSemEval2015": 70.02, - "TwitterURLCorpus": 84.77 + "Model": "udever-bloom-1b1", + "OpusparcusPC (fr)": 85.54, + "PawsXPairClassification (fr)": 61.99 } ] }, "Reranking": { "map": [ { - "Model": "all-MiniLM-L12-v2", - "AlloprofReranking (fra-Latn)": 67.01, - 
"AskUbuntuDupQuestions": 64.06, - "MMarcoReranking (cmn-Hans)": 5.27, - "MindSmallReranking": 31.02, - "RuBQReranking (rus-Cyrl)": 38.51, - "SciDocsRR": 87.2, - "StackOverflowDupQuestions": 51.47, - "SyntecReranking (fra-Latn)": 69.17, - "T2Reranking (cmn-Hans)": 60.32 + "Model": "udever-bloom-1b1", + "AlloprofReranking": 39.13, + "SyntecReranking": 62.58 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "all-MiniLM-L12-v2", - "AILACasedocs": 16.8, - "AILAStatutes": 20.71, - "ARCChallenge": 10.23, - "AlloprofRetrieval (fra-Latn)": 33.2, - "AlloprofRetrieval": 33.2, - "AlphaNLI": 25.35, - "ArguAna": 47.13, - "ArguAna-PL (pol-Latn)": 13.4, - "BSARDRetrieval (fra-Latn)": 6.24, - "CQADupstackRetrieval": 42.53, - "ClimateFEVER": 21.57, - "CmedqaRetrieval (cmn-Hans)": 2.58, - "CovidRetrieval (cmn-Hans)": 10.79, - "DBPedia": 33.35, - "DuRetrieval (cmn-Hans)": 6.62, - "EcomRetrieval (cmn-Hans)": 4.01, - "FEVER": 55.9, - "FiQA-PL (pol-Latn)": 5.82, - "FiQA2018": 37.27, - "GerDaLIRSmall (deu-Latn)": 1.35, - "HellaSwag": 24.08, - "HotpotQA": 44.59, - "LEMBNarrativeQARetrieval": 19.64, - "LEMBNeedleRetrieval": 12.25, - "LEMBPasskeyRetrieval": 14.75, - "LEMBQMSumRetrieval": 13.08, - "LEMBSummScreenFDRetrieval": 46.98, - "LEMBWikimQARetrieval": 44.88, - "LeCaRDv2 (zho-Hans)": 18.77, - "LegalBenchConsumerContractsQA": 60.21, - "LegalBenchCorporateLobbying": 88.69, - "LegalQuAD (deu-Latn)": 7.44, - "LegalSummarization": 57.43, - "MMarcoRetrieval (cmn-Hans)": 7.46, - "MSMARCO": 39.03, - "MedicalRetrieval (cmn-Hans)": 2.3, - "MintakaRetrieval (ara-Arab)": 2.74, - "MintakaRetrieval (deu-Latn)": 20.04, - "MintakaRetrieval (spa-Latn)": 11.76, - "MintakaRetrieval (fra-Latn)": 16.08, - "MintakaRetrieval (hin-Deva)": 3.04, - "MintakaRetrieval (ita-Latn)": 11.83, - "MintakaRetrieval (jpn-Hira)": 7.31, - "MintakaRetrieval (por-Latn)": 13.66, - "NFCorpus": 32.25, - "NFCorpus-PL (pol-Latn)": 15.43, - "NQ": 46.47, - "PIQA": 26.44, - "Quail": 3.08, - "QuoraRetrieval": 87.75, - "RARbCode": 42.44, - "RARbMath": 66.36, - "RuBQRetrieval (rus-Cyrl)": 8.84, - "SCIDOCS": 21.82, - "SCIDOCS-PL (pol-Latn)": 5.34, - "SIQA": 2.09, - "SciFact": 62.64, - "SciFact-PL (pol-Latn)": 22.48, - "SpartQA": 2.67, - "SyntecRetrieval (fra-Latn)": 60.8, - "T2Retrieval (cmn-Hans)": 4.82, - "TRECCOVID": 50.82, - "TRECCOVID-PL (pol-Latn)": 16.52, - "TempReasonL1": 1.66, - "TempReasonL2Fact": 10.31, - "TempReasonL2Pure": 0.63, - "TempReasonL3Fact": 11.11, - "TempReasonL3Pure": 6.63, - "Touche2020": 17.22, - "VideoRetrieval (cmn-Hans)": 9.38, - "WinoGrande": 27.2, - "XPQARetrieval (ara-Arab_ara-Arab)": 7.83, - "XPQARetrieval (eng-Latn_ara-Arab)": 2.52, - "XPQARetrieval (ara-Arab_eng-Latn)": 8.88, - "XPQARetrieval (deu-Latn_deu-Latn)": 56.77, - "XPQARetrieval (eng-Latn_deu-Latn)": 18.2, - "XPQARetrieval (deu-Latn_eng-Latn)": 30.06, - "XPQARetrieval (spa-Latn_spa-Latn)": 42.22, - "XPQARetrieval (eng-Latn_spa-Latn)": 7.53, - "XPQARetrieval (spa-Latn_eng-Latn)": 26.27, - "XPQARetrieval (fra-Latn_fra-Latn)": 55.9, - "XPQARetrieval (eng-Latn_fra-Latn)": 14.89, - "XPQARetrieval (fra-Latn_eng-Latn)": 34.2, - "XPQARetrieval (hin-Deva_hin-Deva)": 33.26, - "XPQARetrieval (eng-Latn_hin-Deva)": 6.44, - "XPQARetrieval (hin-Deva_eng-Latn)": 6.98, - "XPQARetrieval (ita-Latn_ita-Latn)": 58.68, - "XPQARetrieval (eng-Latn_ita-Latn)": 8.56, - "XPQARetrieval (ita-Latn_eng-Latn)": 28.71, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.53, - "XPQARetrieval (eng-Latn_jpn-Hira)": 5.7, - "XPQARetrieval (jpn-Hira_eng-Latn)": 13.75, - "XPQARetrieval (kor-Hang_kor-Hang)": 13.48, - 
"XPQARetrieval (eng-Latn_kor-Hang)": 7.43, - "XPQARetrieval (kor-Hang_eng-Latn)": 7.34, - "XPQARetrieval (pol-Latn_pol-Latn)": 28.07, - "XPQARetrieval (eng-Latn_pol-Latn)": 10.03, - "XPQARetrieval (pol-Latn_eng-Latn)": 16.58, - "XPQARetrieval (por-Latn_por-Latn)": 34.09, - "XPQARetrieval (eng-Latn_por-Latn)": 7.38, - "XPQARetrieval (por-Latn_eng-Latn)": 22.59, - "XPQARetrieval (tam-Taml_tam-Taml)": 9.13, - "XPQARetrieval (eng-Latn_tam-Taml)": 4.15, - "XPQARetrieval (tam-Taml_eng-Latn)": 3.76, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 21.09, - "XPQARetrieval (eng-Latn_cmn-Hans)": 6.58, - "XPQARetrieval (cmn-Hans_eng-Latn)": 9.39, - "XPQARetrieval (fr)": 55.9 + "Model": "udever-bloom-1b1", + "AlloprofRetrieval": 12.37, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 2.78, + "SyntecRetrieval": 40.57, + "XPQARetrieval (fr)": 33.82 } ] }, "STS": { "spearman": [ { - "Model": "all-MiniLM-L12-v2", - "AFQMC (cmn-Hans)": 7.94, - "ATEC (cmn-Hans)": 12.97, - "BIOSSES": 83.57, - "BQ (cmn-Hans)": 23.31, - "CDSC-R (pol-Latn)": 82.5, - "LCQMC (cmn-Hans)": 21.04, - "PAWSX (cmn-Hans)": 7.31, - "RUParaPhraserSTS (rus-Cyrl)": 45.47, - "RuSTSBenchmarkSTS (rus-Cyrl)": 56.33, - "SICK-R": 79.32, - "SICK-R-PL (pol-Latn)": 54.26, - "SICKFr (fra-Latn)": 63.16, - "STS12": 73.08, - "STS13": 82.13, - "STS14": 76.73, - "STS15": 85.58, - "STS16": 80.23, - "STS17 (nld-Latn_eng-Latn)": 24.51, - "STS17 (eng-Latn_ara-Arab)": 0.54, - "STS17 (ara-Arab)": 58.71, - "STS17 (kor-Hang)": 43.37, - "STS17 (eng-Latn_tur-Latn)": 0.43, - "STS17 (ita-Latn_eng-Latn)": 24.28, - "STS17 (eng-Latn_deu-Latn)": 27.54, - "STS17 (fra-Latn_eng-Latn)": 30.7, - "STS17 (spa-Latn)": 78.37, - "STS17 (en-en)": 88.63, - "STS17 (spa-Latn_eng-Latn)": 22.01, - "STS17 (ar-ar)": 58.71, - "STS17 (en-ar)": 0.54, - "STS17 (en-de)": 27.54, - "STS17 (en-tr)": 0.43, - "STS17 (es-en)": 22.01, - "STS17 (es-es)": 78.37, - "STS17 (fr-en)": 30.7, - "STS17 (it-en)": 24.28, - "STS17 (ko-ko)": 43.37, - "STS17 (nl-en)": 24.51, - "STS22 (ara-Arab)": 17.54, - "STS22 (cmn-Hans)": 33.15, - "STS22 (fra-Latn)": 69.51, - "STS22 (deu-Latn_eng-Latn)": 42.86, - "STS22 (pol-Latn)": 19.22, - "STS22 (spa-Latn_eng-Latn)": 53.99, - "STS22 (pol-Latn_eng-Latn)": 42.67, - "STS22 (tur-Latn)": 21.6, - "STS22 (deu-Latn_fra-Latn)": 43.52, - "STS22 (fra-Latn_pol-Latn)": 16.9, - "STS22 (deu-Latn)": 22.53, - "STS22 (deu-Latn_pol-Latn)": 1.63, - "STS22 (en)": 65.67, - "STS22 (spa-Latn)": 43.98, - "STS22 (cmn-Hans_eng-Latn)": 44.39, - "STS22 (spa-Latn_ita-Latn)": 40.71, - "STS22 (ita-Latn)": 47.48, - "STS22 (rus-Cyrl)": 11.19, - "STS22 (ar)": 17.54, - "STS22 (de)": 22.53, - "STS22 (de-en)": 42.86, - "STS22 (de-fr)": 43.52, - "STS22 (de-pl)": 1.63, - "STS22 (es)": 43.98, - "STS22 (es-en)": 53.99, - "STS22 (es-it)": 40.71, - "STS22 (fr)": 69.51, - "STS22 (fr-pl)": 16.9, - "STS22 (it)": 47.48, - "STS22 (pl)": 19.22, - "STS22 (pl-en)": 42.67, - "STS22 (ru)": 11.19, - "STS22 (tr)": 21.6, - "STS22 (zh)": 33.15, - "STS22 (zh-en)": 44.39, - "STSB (cmn-Hans)": 36.66, - "STSBenchmark": 83.09, - "STSBenchmarkMultilingualSTS (nld-Latn)": 60.03, - "STSBenchmarkMultilingualSTS (spa-Latn)": 65.33, - "STSBenchmarkMultilingualSTS (ita-Latn)": 60.71, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 38.93, - "STSBenchmarkMultilingualSTS (en)": 83.09, - "STSBenchmarkMultilingualSTS (por-Latn)": 63.85, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 56.09, - "STSBenchmarkMultilingualSTS (fra-Latn)": 66.68, - "STSBenchmarkMultilingualSTS (pol-Latn)": 60.2, - "STSBenchmarkMultilingualSTS (deu-Latn)": 63.28 + "Model": 
"udever-bloom-1b1", + "SICKFr": 59.94, + "STS22 (fr)": 77.1, + "STSBenchmarkMultilingualSTS (fr)": 49.97 } ] }, "Summarization": { "spearman": [ { - "Model": "all-MiniLM-L12-v2", - "SummEval": 27.9, - "SummEvalFr (fra-Latn)": 26.63 + "Model": "udever-bloom-1b1", + "SummEvalFr": 29.48 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "all-MiniLM-L12-v2" + "Model": "udever-bloom-1b1" } ] } }, - "rubert-tiny2": { + "USER-bge-m3": { "BitextMining": { "f1": [ { - "Model": "rubert-tiny2" + "Model": "USER-bge-m3", + "Tatoeba (rus-Cyrl_eng-Latn)": 93.52 } ] }, "Classification": { "accuracy": [ { - "Model": "rubert-tiny2", - "GeoreviewClassification (rus-Cyrl)": 39.64, - "HeadlineClassification (rus-Cyrl)": 74.19, - "InappropriatenessClassification (rus-Cyrl)": 58.57, - "KinopoiskClassification (rus-Cyrl)": 49.06, - "MassiveIntentClassification (rus-Cyrl)": 50.83, - "MassiveScenarioClassification (rus-Cyrl)": 59.15, - "RuReviewsClassification (rus-Cyrl)": 56.99, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 45.63, - "RuSciBenchOECDClassification (rus-Cyrl)": 35.48 + "Model": "USER-bge-m3", + "GeoreviewClassification (rus-Cyrl)": 50.98, + "HeadlineClassification (rus-Cyrl)": 70.09, + "InappropriatenessClassification (rus-Cyrl)": 60.76, + "KinopoiskClassification (rus-Cyrl)": 63.33, + "MassiveIntentClassification (rus-Cyrl)": 68.85, + "MassiveScenarioClassification (rus-Cyrl)": 72.9, + "RuReviewsClassification (rus-Cyrl)": 68.52, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 57.67, + "RuSciBenchOECDClassification (rus-Cyrl)": 44.2 } ] }, "Clustering": { "v_measure": [ { - "Model": "rubert-tiny2", - "GeoreviewClusteringP2P (rus-Cyrl)": 44.18, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 41.41, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 38.09 + "Model": "USER-bge-m3", + "GeoreviewClusteringP2P (rus-Cyrl)": 62.79, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 53.11, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.93 } ] }, "PairClassification": { "ap": [ { - "Model": "rubert-tiny2", - "TERRa (rus-Cyrl)": 51.87 + "Model": "USER-bge-m3", + "OpusparcusPC (rus-Cyrl)": 90.73, + "TERRa (rus-Cyrl)": 64.99 } ] }, "Reranking": { "map": [ { - "Model": "rubert-tiny2", - "RuBQReranking (rus-Cyrl)": 46.09 + "Model": "USER-bge-m3", + "RuBQReranking (rus-Cyrl)": 73.08 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "rubert-tiny2", - "RiaNewsRetrieval (rus-Cyrl)": 13.92, - "RuBQRetrieval (rus-Cyrl)": 10.87 + "Model": "USER-bge-m3", + "RiaNewsRetrieval (rus-Cyrl)": 83.53, + "RuBQRetrieval (rus-Cyrl)": 70.03 } ] }, "STS": { "spearman": [ { - "Model": "rubert-tiny2", - "RUParaPhraserSTS (rus-Cyrl)": 65.14, - "RuSTSBenchmarkSTS (rus-Cyrl)": 69.43, - "STS22 (rus-Cyrl)": 50.23 + "Model": "USER-bge-m3", + "RUParaPhraserSTS (rus-Cyrl)": 76.36, + "RuSTSBenchmarkSTS (rus-Cyrl)": 83.35, + "STS22 (rus-Cyrl)": 66.42, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.96 } ] }, "Summarization": { "spearman": [ { - "Model": "rubert-tiny2" + "Model": "USER-bge-m3" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "rubert-tiny2" + "Model": "USER-bge-m3" } ] } }, - "electra-small-swedish-cased-discriminator": { + "sbert_large_nlu_ru": { "BitextMining": { "f1": [ { - "Model": "electra-small-swedish-cased-discriminator", - "BornholmBitextMining": 0.85 + "Model": "sbert_large_nlu_ru" } ] }, "Classification": { "accuracy": [ { - "Model": "electra-small-swedish-cased-discriminator", - "AngryTweetsClassification": 40.52, - "DKHateClassification": 52.28, - "DanishPoliticalCommentsClassification": 25.17, - 
"LccSentimentClassification": 36.67, - "MassiveIntentClassification (da)": 6.51, - "MassiveIntentClassification (nb)": 5.66, - "MassiveIntentClassification (sv)": 6.6, - "MassiveScenarioClassification (da)": 11.5, - "MassiveScenarioClassification (nb)": 11.26, - "MassiveScenarioClassification (sv)": 12.16, - "NoRecClassification": 39.72, - "NordicLangClassification": 44.53, - "NorwegianParliament": 52.44, - "ScalaDaClassification": 51.66, - "ScalaNbClassification": 52.41 + "Model": "sbert_large_nlu_ru", + "GeoreviewClassification (rus-Cyrl)": 39.97, + "HeadlineClassification (rus-Cyrl)": 79.26, + "InappropriatenessClassification (rus-Cyrl)": 62.52, + "KinopoiskClassification (rus-Cyrl)": 49.51, + "MassiveIntentClassification (rus-Cyrl)": 61.09, + "MassiveScenarioClassification (rus-Cyrl)": 67.6, + "RuReviewsClassification (rus-Cyrl)": 58.27, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.9, + "RuSciBenchOECDClassification (rus-Cyrl)": 43.04 } ] }, "Clustering": { "v_measure": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru", + "GeoreviewClusteringP2P (rus-Cyrl)": 59.02, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.4, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 46.41 } ] }, "PairClassification": { "ap": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru", + "TERRa (rus-Cyrl)": 50.17 } ] }, "Reranking": { "map": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru", + "RuBQReranking (rus-Cyrl)": 46.81 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru", + "RiaNewsRetrieval (rus-Cyrl)": 11.11, + "RuBQRetrieval (rus-Cyrl)": 12.45 } ] }, "STS": { "spearman": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru", + "RUParaPhraserSTS (rus-Cyrl)": 62.06, + "RuSTSBenchmarkSTS (rus-Cyrl)": 58.82, + "STS22 (rus-Cyrl)": 50.75 } ] }, "Summarization": { "spearman": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "electra-small-swedish-cased-discriminator" + "Model": "sbert_large_nlu_ru" } ] } }, - "e5-base-4k": { + "sentence-bert-swedish-cased": { "BitextMining": { "f1": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased", + "BornholmBitextMining": 14.08 } ] }, "Classification": { "accuracy": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased", + "AngryTweetsClassification": 44.46, + "DKHateClassification": 59.36, + "DanishPoliticalCommentsClassification": 28.32, + "LccSentimentClassification": 47.2, + "MassiveIntentClassification (da)": 42.84, + "MassiveIntentClassification (nb)": 42.74, + "MassiveIntentClassification (sv)": 69.11, + "MassiveScenarioClassification (da)": 49.64, + "MassiveScenarioClassification (nb)": 49.49, + "MassiveScenarioClassification (sv)": 75.96, + "NoRecClassification": 43.53, + "NordicLangClassification": 51.45, + "NorwegianParliament": 55.74, + "ScalaDaClassification": 50.12, + "ScalaNbClassification": 50.34 } ] }, "Clustering": { "v_measure": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased" } ] }, "PairClassification": { "ap": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased" } ] }, "Reranking": { "map": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "e5-base-4k", - "LEMBNarrativeQARetrieval": 30.35, - 
"LEMBNeedleRetrieval": 41.5, - "LEMBPasskeyRetrieval": 67.25, - "LEMBQMSumRetrieval": 35.6, - "LEMBSummScreenFDRetrieval": 95.23, - "LEMBWikimQARetrieval": 69.19 + "Model": "sentence-bert-swedish-cased" } ] }, "STS": { "spearman": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased" } ] }, "Summarization": { "spearman": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "e5-base-4k" + "Model": "sentence-bert-swedish-cased" } ] } }, - "instructor-base": { + "LLM2Vec-Llama-2-unsupervised": { "BitextMining": { "f1": [ { - "Model": "instructor-base" + "Model": "LLM2Vec-Llama-2-unsupervised" } ] }, "Classification": { "accuracy": [ { - "Model": "instructor-base" + "Model": "LLM2Vec-Llama-2-unsupervised", + "AmazonCounterfactualClassification (en)": 76.91, + "AmazonPolarityClassification": 79.05, + "AmazonReviewsClassification (en)": 40.08, + "Banking77Classification": 84.65, + "EmotionClassification": 46.58, + "ImdbClassification": 75.68, + "MTOPDomainClassification (en)": 94.33, + "MTOPIntentClassification (en)": 79.54, + "MassiveIntentClassification (en)": 73.84, + "MassiveScenarioClassification (en)": 79.17, + "ToxicConversationsClassification": 71.81, + "TweetSentimentExtractionClassification": 57.17 } ] }, "Clustering": { "v_measure": [ { - "Model": "instructor-base" + "Model": "LLM2Vec-Llama-2-unsupervised", + "ArxivClusteringP2P": 47.81, + "ArxivClusteringS2S": 40.53, + "BiorxivClusteringP2P": 38.12, + "BiorxivClusteringS2S": 31.25, + "MedrxivClusteringP2P": 30.94, + "MedrxivClusteringS2S": 28.04, + "RedditClustering": 42.84, + "RedditClusteringP2P": 60.1, + "StackExchangeClustering": 65.12, + "StackExchangeClusteringP2P": 33.61, + "TwentyNewsgroupsClustering": 30.76 } ] }, "PairClassification": { "ap": [ { - "Model": "instructor-base" + "Model": "LLM2Vec-Llama-2-unsupervised", + "SprintDuplicateQuestions": 87.57, + "TwitterSemEval2015": 65.14, + "TwitterURLCorpus": 80.94 } ] }, "Reranking": { "map": [ { - "Model": "instructor-base" + "Model": "LLM2Vec-Llama-2-unsupervised", + "AskUbuntuDupQuestions": 55.56, + "MindSmallReranking": 30.86, + "SciDocsRR": 77.62, + "StackOverflowDupQuestions": 47.77 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "instructor-base" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "instructor-base" - } - ] - }, - "Summarization": { - "spearman": [ + "Model": "LLM2Vec-Llama-2-unsupervised", + "ArguAna": 47.09, + "CQADupstackRetrieval": 30.78, + "ClimateFEVER": 20.67, + "DBPedia": 25.81, + "FEVER": 43.48, + "FiQA2018": 24.62, + "HotpotQA": 48.46, + "MSMARCO": 18.81, + "NFCorpus": 26.81, + "NQ": 33.21, + "QuoraRetrieval": 86.15, + "SCIDOCS": 10.0, + "SciFact": 64.48, + "TRECCOVID": 60.67, + "Touche2020": 10.18 + } + ] + }, + "STS": { + "spearman": [ { - "Model": "instructor-base" + "Model": "LLM2Vec-Llama-2-unsupervised", + "BIOSSES": 82.41, + "SICK-R": 71.77, + "STS12": 65.39, + "STS13": 79.26, + "STS14": 72.98, + "STS15": 82.72, + "STS16": 81.02, + "STS17 (en-en)": 86.7, + "STS22 (en)": 63.47, + "STSBenchmark": 78.32 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LLM2Vec-Llama-2-unsupervised", + "SummEval": 31.38 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "instructor-base", - "Core17InstructionRetrieval": -1.09, - "News21InstructionRetrieval": -1.78, - "Robust04InstructionRetrieval": -10.42 + "Model": "LLM2Vec-Llama-2-unsupervised" } ] } }, - "distiluse-base-multilingual-cased-v2": { + "all-MiniLM-L6-v2-instruct": { 
"BitextMining": { "f1": [ { - "Model": "distiluse-base-multilingual-cased-v2" + "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Classification": { "accuracy": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "AllegroReviews": 28.03, - "AmazonCounterfactualClassification (de)": 68.14, - "AmazonCounterfactualClassification (en)": 71.81, - "AmazonCounterfactualClassification (en-ext)": 72.96, - "AmazonCounterfactualClassification (ja)": 65.39, - "AmazonPolarityClassification": 68.0, - "AmazonReviewsClassification (de)": 35.03, - "AmazonReviewsClassification (en)": 35.45, - "AmazonReviewsClassification (es)": 36.24, - "AmazonReviewsClassification (fr)": 35.7, - "AmazonReviewsClassification (ja)": 31.08, - "AmazonReviewsClassification (zh)": 33.89, - "Banking77Classification": 71.48, - "CBD": 60.0, - "EmotionClassification": 40.04, - "ImdbClassification": 61.52, - "MTOPDomainClassification (de)": 86.19, - "MTOPDomainClassification (en)": 91.59, - "MTOPDomainClassification (es)": 87.75, - "MTOPDomainClassification (fr)": 84.61, - "MTOPDomainClassification (hi)": 76.41, - "MTOPDomainClassification (th)": 73.62, - "MTOPIntentClassification (de)": 59.21, - "MTOPIntentClassification (en)": 66.4, - "MTOPIntentClassification (es)": 57.21, - "MTOPIntentClassification (fr)": 53.41, - "MTOPIntentClassification (hi)": 45.54, - "MTOPIntentClassification (th)": 47.73, - "MasakhaNEWSClassification (fra)": 76.87, - "MassiveIntentClassification (af)": 40.02, - "MassiveIntentClassification (am)": 2.35, - "MassiveIntentClassification (ar)": 43.14, - "MassiveIntentClassification (az)": 25.6, - "MassiveIntentClassification (bn)": 4.84, - "MassiveIntentClassification (cy)": 15.43, - "MassiveIntentClassification (da)": 52.33, - "MassiveIntentClassification (de)": 51.57, - "MassiveIntentClassification (el)": 49.65, - "MassiveIntentClassification (en)": 66.71, - "MassiveIntentClassification (es)": 56.57, - "MassiveIntentClassification (fa)": 55.36, - "MassiveIntentClassification (fi)": 45.72, - "MassiveIntentClassification (fr)": 57.02, - "MassiveIntentClassification (he)": 46.74, - "MassiveIntentClassification (hi)": 48.55, - "MassiveIntentClassification (hu)": 50.65, - "MassiveIntentClassification (hy)": 40.79, - "MassiveIntentClassification (id)": 56.0, - "MassiveIntentClassification (is)": 16.08, - "MassiveIntentClassification (it)": 57.65, - "MassiveIntentClassification (ja)": 55.33, - "MassiveIntentClassification (jv)": 28.16, - "MassiveIntentClassification (ka)": 29.41, - "MassiveIntentClassification (km)": 4.79, - "MassiveIntentClassification (kn)": 3.37, - "MassiveIntentClassification (ko)": 49.97, - "MassiveIntentClassification (lv)": 44.31, - "MassiveIntentClassification (ml)": 3.24, - "MassiveIntentClassification (mn)": 40.37, - "MassiveIntentClassification (ms)": 47.97, - "MassiveIntentClassification (my)": 38.48, - "MassiveIntentClassification (nb)": 46.01, - "MassiveIntentClassification (nl)": 58.29, - "MassiveIntentClassification (pl)": 53.1, - "MassiveIntentClassification (pt)": 58.63, - "MassiveIntentClassification (ro)": 50.63, - "MassiveIntentClassification (ru)": 57.96, - "MassiveIntentClassification (sl)": 50.66, - "MassiveIntentClassification (sq)": 50.25, - "MassiveIntentClassification (sv)": 52.41, - "MassiveIntentClassification (sw)": 19.29, - "MassiveIntentClassification (ta)": 3.79, - "MassiveIntentClassification (te)": 3.36, - "MassiveIntentClassification (th)": 45.28, - "MassiveIntentClassification (tl)": 28.44, - "MassiveIntentClassification (tr)": 50.47, - 
"MassiveIntentClassification (ur)": 46.03, - "MassiveIntentClassification (vi)": 45.25, - "MassiveIntentClassification (zh-CN)": 59.22, - "MassiveIntentClassification (zh-TW)": 54.96, - "MassiveScenarioClassification (af)": 53.67, - "MassiveScenarioClassification (am)": 7.72, - "MassiveScenarioClassification (ar)": 52.19, - "MassiveScenarioClassification (az)": 34.75, - "MassiveScenarioClassification (bn)": 10.65, - "MassiveScenarioClassification (cy)": 21.24, - "MassiveScenarioClassification (da)": 62.55, - "MassiveScenarioClassification (de)": 61.4, - "MassiveScenarioClassification (el)": 60.68, - "MassiveScenarioClassification (en)": 74.0, - "MassiveScenarioClassification (es)": 64.61, - "MassiveScenarioClassification (fa)": 59.24, - "MassiveScenarioClassification (fi)": 54.66, - "MassiveScenarioClassification (fr)": 65.2, - "MassiveScenarioClassification (he)": 54.74, - "MassiveScenarioClassification (hi)": 55.99, - "MassiveScenarioClassification (hu)": 61.2, - "MassiveScenarioClassification (hy)": 49.63, - "MassiveScenarioClassification (id)": 65.25, - "MassiveScenarioClassification (is)": 22.6, - "MassiveScenarioClassification (it)": 64.63, - "MassiveScenarioClassification (ja)": 62.32, - "MassiveScenarioClassification (jv)": 35.77, - "MassiveScenarioClassification (ka)": 39.08, - "MassiveScenarioClassification (km)": 9.24, - "MassiveScenarioClassification (kn)": 8.28, - "MassiveScenarioClassification (ko)": 57.6, - "MassiveScenarioClassification (lv)": 51.72, - "MassiveScenarioClassification (ml)": 8.25, - "MassiveScenarioClassification (mn)": 47.21, - "MassiveScenarioClassification (ms)": 55.65, - "MassiveScenarioClassification (my)": 43.31, - "MassiveScenarioClassification (nb)": 54.98, - "MassiveScenarioClassification (nl)": 67.49, - "MassiveScenarioClassification (pl)": 61.29, - "MassiveScenarioClassification (pt)": 64.26, - "MassiveScenarioClassification (ro)": 58.03, - "MassiveScenarioClassification (ru)": 65.41, - "MassiveScenarioClassification (sl)": 59.36, - "MassiveScenarioClassification (sq)": 62.69, - "MassiveScenarioClassification (sv)": 64.35, - "MassiveScenarioClassification (sw)": 25.12, - "MassiveScenarioClassification (ta)": 8.67, - "MassiveScenarioClassification (te)": 7.82, - "MassiveScenarioClassification (th)": 54.65, - "MassiveScenarioClassification (tl)": 36.09, - "MassiveScenarioClassification (tr)": 60.89, - "MassiveScenarioClassification (ur)": 54.71, - "MassiveScenarioClassification (vi)": 55.15, - "MassiveScenarioClassification (zh-CN)": 66.44, - "MassiveScenarioClassification (zh-TW)": 62.89, - "PAC": 68.17, - "PolEmo2.0-IN": 48.84, - "PolEmo2.0-OUT": 30.0, - "ToxicConversationsClassification": 69.09, - "TweetSentimentExtractionClassification": 59.97 + "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Clustering": { "v_measure": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "8TagsClustering": 12.51, - "AlloProfClusteringP2P": 55.95, - "AlloProfClusteringS2S": 35.39, - "ArxivClusteringP2P": 33.59, - "HALClusteringS2S": 18.2, - "MLSUMClusteringP2P": 40.17, - "MLSUMClusteringS2S": 34.65, - "MasakhaNEWSClusteringP2P (fra)": 53.76, - "MasakhaNEWSClusteringS2S (fra)": 32.76 + "Model": "all-MiniLM-L6-v2-instruct" } ] }, "PairClassification": { "ap": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "CDSC-E": 71.83, - "OpusparcusPC (fr)": 92.07, - "PPC": 86.83, - "PSC": 96.35, - "PawsXPairClassification (fr)": 51.08, - "SICK-E-PL": 62.05, - "SprintDuplicateQuestions": 87.15, - "TwitterSemEval2015": 61.67, - "TwitterURLCorpus": 84.02 + "Model": 
"all-MiniLM-L6-v2-instruct" } ] }, "Reranking": { "map": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "AlloprofReranking": 51.77, - "AskUbuntuDupQuestions": 53.75, - "MindSmallReranking": 30.39, - "SciDocsRR": 69.22, - "StackOverflowDupQuestions": 41.92, - "SyntecReranking": 74.78 + "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "AlloprofRetrieval": 26.99, - "ArguAna-PL": 36.7, - "BSARDRetrieval": 0.0, - "DBPedia-PL": 12.36, - "FiQA-PL": 8.02, - "HotpotQA-PL": 20.83, - "MSMARCO-PL": 4.57, - "MintakaRetrieval (fr)": 22.55, - "NFCorpus-PL": 16.28, - "NQ-PL": 5.85, - "Quora-PL": 71.95, - "SCIDOCS-PL": 6.5, - "SciFact-PL": 33.03, - "SyntecRetrieval": 65.34, - "TRECCOVID-PL": 16.91, - "XPQARetrieval (fr)": 51.2 + "Model": "all-MiniLM-L6-v2-instruct", + "ARCChallenge": 9.4, + "AlphaNLI": 15.09, + "HellaSwag": 20.51, + "PIQA": 24.68, + "Quail": 3.46, + "RARbCode": 42.47, + "RARbMath": 62.39, + "SIQA": 1.53, + "SpartQA": 0.57, + "TempReasonL1": 1.05, + "TempReasonL2Fact": 16.57, + "TempReasonL2Pure": 0.49, + "TempReasonL3Fact": 14.01, + "TempReasonL3Pure": 6.27, + "WinoGrande": 20.73 } ] }, "STS": { "spearman": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "BIOSSES": 78.34, - "CDSC-R": 87.67, - "SICK-R": 75.25, - "SICK-R-PL": 65.53, - "SICKFr": 72.49, - "STS12": 72.96, - "STS13": 70.58, - "STS14": 70.29, - "STS15": 81.94, - "STS16": 76.8, - "STS17 (ar-ar)": 77.34, - "STS17 (en-ar)": 77.46, - "STS17 (en-de)": 80.24, - "STS17 (en-en)": 86.19, - "STS17 (en-tr)": 74.34, - "STS17 (es-en)": 77.4, - "STS17 (es-es)": 83.71, - "STS17 (fr-en)": 79.28, - "STS17 (it-en)": 80.82, - "STS17 (ko-ko)": 76.4, - "STS17 (nl-en)": 80.51, - "STS22 (ar)": 49.04, - "STS22 (de)": 35.73, - "STS22 (de-en)": 47.51, - "STS22 (de-fr)": 60.76, - "STS22 (de-pl)": 36.09, - "STS22 (en)": 62.88, - "STS22 (es)": 59.34, - "STS22 (es-en)": 68.96, - "STS22 (es-it)": 63.28, - "STS22 (fr)": 76.41, - "STS22 (fr-pl)": 61.98, - "STS22 (it)": 65.1, - "STS22 (pl)": 34.58, - "STS22 (pl-en)": 71.33, - "STS22 (ru)": 52.4, - "STS22 (tr)": 54.07, - "STS22 (zh)": 54.32, - "STS22 (zh-en)": 61.75, - "STSBenchmark": 80.75, - "STSBenchmarkMultilingualSTS (fr)": 77.49 + "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Summarization": { "spearman": [ { - "Model": "distiluse-base-multilingual-cased-v2", - "SummEvalFr": 28.12 + "Model": "all-MiniLM-L6-v2-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "distiluse-base-multilingual-cased-v2" + "Model": "all-MiniLM-L6-v2-instruct" } ] } }, - "all-MiniLM-L6-v2": { + "google-gecko.text-embedding-preview-0409": { "BitextMining": { "f1": [ { - "Model": "all-MiniLM-L6-v2", - "BornholmBitextMining": 29.68, - "BornholmBitextMining (dan-Latn)": 29.68, - "Tatoeba (kab-Latn_eng-Latn)": 0.96, - "Tatoeba (aze-Latn_eng-Latn)": 1.04, - "Tatoeba (wuu-Hans_eng-Latn)": 0.6, - "Tatoeba (fra-Latn_eng-Latn)": 8.17, - "Tatoeba (nov-Latn_eng-Latn)": 13.97, - "Tatoeba (slk-Latn_eng-Latn)": 3.27, - "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0, - "Tatoeba (ukr-Cyrl_eng-Latn)": 0.3, - "Tatoeba (kur-Latn_eng-Latn)": 5.21, - "Tatoeba (hin-Deva_eng-Latn)": 0.0, - "Tatoeba (tgl-Latn_eng-Latn)": 2.69, - "Tatoeba (jav-Latn_eng-Latn)": 3.37, - "Tatoeba (nob-Latn_eng-Latn)": 4.34, - "Tatoeba (tam-Taml_eng-Latn)": 0.33, - "Tatoeba (hsb-Latn_eng-Latn)": 2.65, - "Tatoeba (srp-Cyrl_eng-Latn)": 1.28, - "Tatoeba (cat-Latn_eng-Latn)": 6.93, - "Tatoeba (jpn-Jpan_eng-Latn)": 0.97, - "Tatoeba (kzj-Latn_eng-Latn)": 2.78, - "Tatoeba 
(uig-Arab_eng-Latn)": 0.2, - "Tatoeba (max-Deva_eng-Latn)": 6.93, - "Tatoeba (dtp-Latn_eng-Latn)": 1.88, - "Tatoeba (cbk-Latn_eng-Latn)": 7.04, - "Tatoeba (bre-Latn_eng-Latn)": 3.22, - "Tatoeba (arz-Arab_eng-Latn)": 0.0, - "Tatoeba (heb-Hebr_eng-Latn)": 0.22, - "Tatoeba (kat-Geor_eng-Latn)": 0.3, - "Tatoeba (yid-Hebr_eng-Latn)": 0.14, - "Tatoeba (lit-Latn_eng-Latn)": 0.92, - "Tatoeba (ber-Tfng_eng-Latn)": 4.69, - "Tatoeba (hun-Latn_eng-Latn)": 3.56, - "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0, - "Tatoeba (isl-Latn_eng-Latn)": 2.37, - "Tatoeba (ind-Latn_eng-Latn)": 3.86, - "Tatoeba (tuk-Latn_eng-Latn)": 3.52, - "Tatoeba (kor-Hang_eng-Latn)": 0.45, - "Tatoeba (ara-Arab_eng-Latn)": 0.0, - "Tatoeba (tzl-Latn_eng-Latn)": 4.58, - "Tatoeba (swe-Latn_eng-Latn)": 6.06, - "Tatoeba (ang-Latn_eng-Latn)": 15.64, - "Tatoeba (mon-Cyrl_eng-Latn)": 0.38, - "Tatoeba (urd-Arab_eng-Latn)": 0.1, - "Tatoeba (vie-Latn_eng-Latn)": 3.07, - "Tatoeba (ina-Latn_eng-Latn)": 17.63, - "Tatoeba (hrv-Latn_eng-Latn)": 3.83, - "Tatoeba (war-Latn_eng-Latn)": 4.94, - "Tatoeba (cor-Latn_eng-Latn)": 2.41, - "Tatoeba (tur-Latn_eng-Latn)": 3.59, - "Tatoeba (bul-Cyrl_eng-Latn)": 0.21, - "Tatoeba (spa-Latn_eng-Latn)": 5.63, - "Tatoeba (tel-Telu_eng-Latn)": 0.46, - "Tatoeba (nds-Latn_eng-Latn)": 9.56, - "Tatoeba (lvs-Latn_eng-Latn)": 2.61, - "Tatoeba (amh-Ethi_eng-Latn)": 0.25, - "Tatoeba (pms-Latn_eng-Latn)": 7.62, - "Tatoeba (xho-Latn_eng-Latn)": 4.01, - "Tatoeba (epo-Latn_eng-Latn)": 5.46, - "Tatoeba (por-Latn_eng-Latn)": 8.29, - "Tatoeba (ile-Latn_eng-Latn)": 13.54, - "Tatoeba (ell-Grek_eng-Latn)": 0.1, - "Tatoeba (oci-Latn_eng-Latn)": 6.55, - "Tatoeba (pes-Arab_eng-Latn)": 0.0, - "Tatoeba (tat-Cyrl_eng-Latn)": 0.44, - "Tatoeba (awa-Deva_eng-Latn)": 0.51, - "Tatoeba (fao-Latn_eng-Latn)": 5.33, - "Tatoeba (swg-Latn_eng-Latn)": 8.92, - "Tatoeba (uzb-Latn_eng-Latn)": 2.34, - "Tatoeba (cym-Latn_eng-Latn)": 6.09, - "Tatoeba (mar-Deva_eng-Latn)": 0.0, - "Tatoeba (fry-Latn_eng-Latn)": 11.22, - "Tatoeba (ces-Latn_eng-Latn)": 3.04, - "Tatoeba (afr-Latn_eng-Latn)": 5.89, - "Tatoeba (csb-Latn_eng-Latn)": 3.78, - "Tatoeba (pol-Latn_eng-Latn)": 2.58, - "Tatoeba (gla-Latn_eng-Latn)": 2.7, - "Tatoeba (deu-Latn_eng-Latn)": 7.89, - "Tatoeba (cmn-Hans_eng-Latn)": 1.92, - "Tatoeba (ita-Latn_eng-Latn)": 9.9, - "Tatoeba (ben-Beng_eng-Latn)": 0.0, - "Tatoeba (glg-Latn_eng-Latn)": 9.31, - "Tatoeba (dsb-Latn_eng-Latn)": 2.9, - "Tatoeba (pam-Latn_eng-Latn)": 3.54, - "Tatoeba (ast-Latn_eng-Latn)": 6.84, - "Tatoeba (bos-Latn_eng-Latn)": 5.58, - "Tatoeba (nld-Latn_eng-Latn)": 10.16, - "Tatoeba (bel-Cyrl_eng-Latn)": 0.5, - "Tatoeba (orv-Cyrl_eng-Latn)": 0.0, - "Tatoeba (gsw-Latn_eng-Latn)": 11.33, - "Tatoeba (dan-Latn_eng-Latn)": 7.84, - "Tatoeba (hye-Armn_eng-Latn)": 0.41, - "Tatoeba (mal-Mlym_eng-Latn)": 0.15, - "Tatoeba (arq-Arab_eng-Latn)": 0.11, - "Tatoeba (kaz-Cyrl_eng-Latn)": 0.42, - "Tatoeba (khm-Khmr_eng-Latn)": 0.42, - "Tatoeba (tha-Thai_eng-Latn)": 0.3, - "Tatoeba (swh-Latn_eng-Latn)": 5.8, - "Tatoeba (gle-Latn_eng-Latn)": 2.75, - "Tatoeba (ceb-Latn_eng-Latn)": 3.39, - "Tatoeba (sqi-Latn_eng-Latn)": 3.58, - "Tatoeba (slv-Latn_eng-Latn)": 3.25, - "Tatoeba (ido-Latn_eng-Latn)": 7.48, - "Tatoeba (yue-Hant_eng-Latn)": 0.86, - "Tatoeba (nno-Latn_eng-Latn)": 5.38, - "Tatoeba (est-Latn_eng-Latn)": 2.36, - "Tatoeba (lfn-Latn_eng-Latn)": 4.55, - "Tatoeba (lat-Latn_eng-Latn)": 5.04, - "Tatoeba (cha-Latn_eng-Latn)": 13.29, - "Tatoeba (eus-Latn_eng-Latn)": 5.54, - "Tatoeba (fin-Latn_eng-Latn)": 2.79, - "Tatoeba (rus-Cyrl_eng-Latn)": 0.07, - "Tatoeba 
(ron-Latn_eng-Latn)": 6.82, - "Tatoeba (zsm-Latn_eng-Latn)": 4.24 + "Model": "google-gecko.text-embedding-preview-0409" } ] }, "Classification": { "accuracy": [ { - "Model": "all-MiniLM-L6-v2", - "AllegroReviews (pol-Latn)": 24.64, - "AmazonCounterfactualClassification (en)": 63.64, - "AmazonCounterfactualClassification (en-ext)": 65.59, - "AmazonCounterfactualClassification (deu-Latn)": 57.82, - "AmazonCounterfactualClassification (jpn-Jpan)": 60.9, - "AmazonPolarityClassification": 64.26, - "AmazonReviewsClassification (en)": 30.85, - "AmazonReviewsClassification (deu-Latn)": 26.44, - "AmazonReviewsClassification (spa-Latn)": 27.35, - "AmazonReviewsClassification (fra-Latn)": 26.88, - "AmazonReviewsClassification (jpn-Jpan)": 23.78, - "AmazonReviewsClassification (cmn-Hans)": 23.67, - "AngryTweetsClassification": 42.49, - "AngryTweetsClassification (dan-Latn)": 42.48, - "Banking77Classification": 80.04, - "CBD (pol-Latn)": 50.9, - "DKHateClassification": 55.05, - "DanishPoliticalCommentsClassification": 26.96, - "DanishPoliticalCommentsClassification (dan-Latn)": 26.7, - "EmotionClassification": 40.83, - "GeoreviewClassification (rus-Cyrl)": 27.08, - "HeadlineClassification (rus-Cyrl)": 27.77, - "IFlyTek (cmn-Hans)": 16.09, - "ImdbClassification": 61.76, - "InappropriatenessClassification (rus-Cyrl)": 51.73, - "JDReview (cmn-Hans)": 59.98, - "KinopoiskClassification (rus-Cyrl)": 33.93, - "LccSentimentClassification": 38.47, - "LccSentimentClassification (dan-Latn)": 38.53, - "MTOPDomainClassification (en)": 91.68, - "MTOPDomainClassification (deu-Latn)": 70.47, - "MTOPDomainClassification (spa-Latn)": 72.99, - "MTOPDomainClassification (fra-Latn)": 75.1, - "MTOPDomainClassification (hin-Deva)": 40.74, - "MTOPDomainClassification (tha-Thai)": 15.66, - "MTOPIntentClassification (en)": 61.55, - "MTOPIntentClassification (deu-Latn)": 45.7, - "MTOPIntentClassification (spa-Latn)": 44.19, - "MTOPIntentClassification (fra-Latn)": 39.67, - "MTOPIntentClassification (hin-Deva)": 18.69, - "MTOPIntentClassification (tha-Thai)": 5.78, - "MasakhaNEWSClassification (fra)": 74.05, - "MasakhaNEWSClassification (amh-Ethi)": 33.03, - "MasakhaNEWSClassification (eng)": 77.11, - "MasakhaNEWSClassification (fra-Latn)": 68.84, - "MasakhaNEWSClassification (hau-Latn)": 50.49, - "MasakhaNEWSClassification (ibo-Latn)": 52.15, - "MasakhaNEWSClassification (lin-Latn)": 68.29, - "MasakhaNEWSClassification (lug-Latn)": 47.58, - "MasakhaNEWSClassification (orm-Ethi)": 50.68, - "MasakhaNEWSClassification (pcm-Latn)": 92.56, - "MasakhaNEWSClassification (run-Latn)": 54.81, - "MasakhaNEWSClassification (sna-Latn)": 65.58, - "MasakhaNEWSClassification (som-Latn)": 39.8, - "MasakhaNEWSClassification (swa-Latn)": 47.25, - "MasakhaNEWSClassification (tir-Ethi)": 28.97, - "MasakhaNEWSClassification (xho-Latn)": 54.14, - "MasakhaNEWSClassification (yor-Latn)": 55.01, - "MassiveIntentClassification (en)": 66.94, - "MassiveIntentClassification (da)": 40.99, - "MassiveIntentClassification (nb)": 39.34, - "MassiveIntentClassification (sv)": 38.1, - "MassiveIntentClassification (aze-Latn)": 30.63, - "MassiveIntentClassification (spa-Latn)": 39.88, - "MassiveIntentClassification (tam-Taml)": 11.31, - "MassiveIntentClassification (swe-Latn)": 38.09, - "MassiveIntentClassification (fas-Arab)": 19.1, - "MassiveIntentClassification (khm-Khmr)": 4.89, - "MassiveIntentClassification (mon-Cyrl)": 20.35, - "MassiveIntentClassification (hye-Armn)": 7.62, - "MassiveIntentClassification (kan-Knda)": 3.14, - "MassiveIntentClassification 
(cmo-Hans)": 24.4, - "MassiveIntentClassification (rus-Cyrl)": 27.58, - "MassiveIntentClassification (jpn-Jpan)": 31.87, - "MassiveIntentClassification (deu-Latn)": 43.44, - "MassiveIntentClassification (ind-Latn)": 39.02, - "MassiveIntentClassification (cym-Latn)": 34.54, - "MassiveIntentClassification (nld-Latn)": 40.2, - "MassiveIntentClassification (hin-Deva)": 17.7, - "MassiveIntentClassification (afr-Latn)": 37.45, - "MassiveIntentClassification (ell-Grek)": 24.19, - "MassiveIntentClassification (mal-Mlym)": 2.87, - "MassiveIntentClassification (por-Latn)": 43.76, - "MassiveIntentClassification (sqi-Latn)": 40.7, - "MassiveIntentClassification (urd-Arab)": 14.42, - "MassiveIntentClassification (vie-Latn)": 37.09, - "MassiveIntentClassification (hun-Latn)": 35.69, - "MassiveIntentClassification (ron-Latn)": 40.54, - "MassiveIntentClassification (ara-Arab)": 19.05, - "MassiveIntentClassification (nob-Latn)": 39.36, - "MassiveIntentClassification (slv-Latn)": 36.7, - "MassiveIntentClassification (lav-Latn)": 36.97, - "MassiveIntentClassification (heb-Hebr)": 22.48, - "MassiveIntentClassification (pol-Latn)": 36.07, - "MassiveIntentClassification (ita-Latn)": 41.59, - "MassiveIntentClassification (msa-Latn)": 35.07, - "MassiveIntentClassification (mya-Mymr)": 4.24, - "MassiveIntentClassification (isl-Latn)": 29.95, - "MassiveIntentClassification (tel-Telu)": 2.46, - "MassiveIntentClassification (swa-Latn)": 34.98, - "MassiveIntentClassification (amh-Ethi)": 2.62, - "MassiveIntentClassification (cmo-Hant)": 22.56, - "MassiveIntentClassification (tha-Thai)": 11.26, - "MassiveIntentClassification (ben-Beng)": 13.1, - "MassiveIntentClassification (fin-Latn)": 38.37, - "MassiveIntentClassification (fra-Latn)": 42.55, - "MassiveIntentClassification (kor-Kore)": 16.05, - "MassiveIntentClassification (kat-Geor)": 9.07, - "MassiveIntentClassification (dan-Latn)": 41.0, - "MassiveIntentClassification (tur-Latn)": 33.76, - "MassiveIntentClassification (tgl-Latn)": 37.92, - "MassiveIntentClassification (jav-Latn)": 35.91, - "MassiveScenarioClassification (en)": 73.81, - "MassiveScenarioClassification (da)": 47.01, - "MassiveScenarioClassification (nb)": 44.67, - "MassiveScenarioClassification (sv)": 42.93, - "MassiveScenarioClassification (mal-Mlym)": 7.67, - "MassiveScenarioClassification (khm-Khmr)": 9.25, - "MassiveScenarioClassification (deu-Latn)": 51.47, - "MassiveScenarioClassification (msa-Latn)": 43.67, - "MassiveScenarioClassification (heb-Hebr)": 24.01, - "MassiveScenarioClassification (mon-Cyrl)": 25.47, - "MassiveScenarioClassification (mya-Mymr)": 10.61, - "MassiveScenarioClassification (ind-Latn)": 43.46, - "MassiveScenarioClassification (nob-Latn)": 44.67, - "MassiveScenarioClassification (fra-Latn)": 51.14, - "MassiveScenarioClassification (tgl-Latn)": 45.69, - "MassiveScenarioClassification (amh-Ethi)": 7.57, - "MassiveScenarioClassification (fas-Arab)": 23.97, - "MassiveScenarioClassification (vie-Latn)": 40.47, - "MassiveScenarioClassification (sqi-Latn)": 47.21, - "MassiveScenarioClassification (dan-Latn)": 47.02, - "MassiveScenarioClassification (spa-Latn)": 49.0, - "MassiveScenarioClassification (pol-Latn)": 43.82, - "MassiveScenarioClassification (tel-Telu)": 7.95, - "MassiveScenarioClassification (tha-Thai)": 19.5, - "MassiveScenarioClassification (kor-Kore)": 20.3, - "MassiveScenarioClassification (cmo-Hans)": 33.65, - "MassiveScenarioClassification (urd-Arab)": 23.73, - "MassiveScenarioClassification (aze-Latn)": 35.59, - "MassiveScenarioClassification (ron-Latn)": 48.23, 
- "MassiveScenarioClassification (jav-Latn)": 43.59, - "MassiveScenarioClassification (slv-Latn)": 41.9, - "MassiveScenarioClassification (kat-Geor)": 14.92, - "MassiveScenarioClassification (lav-Latn)": 40.43, - "MassiveScenarioClassification (cym-Latn)": 39.0, - "MassiveScenarioClassification (swe-Latn)": 42.95, - "MassiveScenarioClassification (rus-Cyrl)": 30.46, - "MassiveScenarioClassification (ben-Beng)": 20.56, - "MassiveScenarioClassification (por-Latn)": 50.72, - "MassiveScenarioClassification (hye-Armn)": 13.03, - "MassiveScenarioClassification (jpn-Jpan)": 37.3, - "MassiveScenarioClassification (nld-Latn)": 48.43, - "MassiveScenarioClassification (swa-Latn)": 43.32, - "MassiveScenarioClassification (tam-Taml)": 17.37, - "MassiveScenarioClassification (isl-Latn)": 36.12, - "MassiveScenarioClassification (kan-Knda)": 7.85, - "MassiveScenarioClassification (ell-Grek)": 31.3, - "MassiveScenarioClassification (tur-Latn)": 38.85, - "MassiveScenarioClassification (cmo-Hant)": 31.18, - "MassiveScenarioClassification (fin-Latn)": 42.38, - "MassiveScenarioClassification (hin-Deva)": 23.71, - "MassiveScenarioClassification (ara-Arab)": 25.99, - "MassiveScenarioClassification (hun-Latn)": 41.61, - "MassiveScenarioClassification (afr-Latn)": 43.87, - "MassiveScenarioClassification (ita-Latn)": 49.8, - "MultilingualSentiment (cmn-Hans)": 41.28, - "NoRecClassification": 40.02, - "NoRecClassification (nob-Latn)": 37.93, - "NordicLangClassification": 54.71, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.7, - "NorwegianParliament": 54.8, - "OnlineShopping (cmn-Hans)": 57.74, - "PAC (pol-Latn)": 59.78, - "PolEmo2.0-IN (pol-Latn)": 40.29, - "PolEmo2.0-OUT (pol-Latn)": 25.0, - "RuReviewsClassification (rus-Cyrl)": 41.79, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.08, - "RuSciBenchOECDClassification (rus-Cyrl)": 8.3, - "ScalaDaClassification": 50.03, - "ScalaNbClassification": 50.17, - "TNews (cmn-Hans)": 20.12, - "ToxicConversationsClassification": 62.09, - "TweetSentimentExtractionClassification": 54.04, - "Waimai (cmn-Hans)": 62.72 + "Model": "google-gecko.text-embedding-preview-0409", + "AmazonCounterfactualClassification (en)": 75.34, + "AmazonPolarityClassification": 97.34, + "AmazonReviewsClassification (en)": 51.17, + "Banking77Classification": 88.62, + "EmotionClassification": 52.51, + "ImdbClassification": 95.65, + "MTOPDomainClassification (en)": 98.35, + "MTOPIntentClassification (en)": 83.43, + "MassiveIntentClassification (en)": 80.22, + "MassiveScenarioClassification (en)": 87.19, + "ToxicConversationsClassification": 89.67, + "TweetSentimentExtractionClassification": 74.52 } ] }, "Clustering": { "v_measure": [ { - "Model": "all-MiniLM-L6-v2", - "AlloProfClusteringP2P": 51.83, - "AlloProfClusteringS2S": 32.07, - "ArxivClusteringP2P": 46.55, - "ArxivClusteringS2S": 37.86, - "BiorxivClusteringP2P": 38.37, - "BiorxivClusteringS2S": 32.88, - "GeoreviewClusteringP2P (rus-Cyrl)": 20.25, - "HALClusteringS2S": 18.84, - "MLSUMClusteringP2P": 36.74, - "MLSUMClusteringP2P (rus-Cyrl)": 23.91, - "MLSUMClusteringS2S": 28.12, - "MLSUMClusteringS2S (rus-Cyrl)": 19.07, - "MasakhaNEWSClusteringP2P (fra)": 34.92, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 43.85, - "MasakhaNEWSClusteringP2P (eng)": 48.88, - "MasakhaNEWSClusteringP2P (fra-Latn)": 34.92, - "MasakhaNEWSClusteringP2P (hau-Latn)": 24.77, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 45.94, - "MasakhaNEWSClusteringP2P (lin-Latn)": 69.56, - "MasakhaNEWSClusteringP2P (lug-Latn)": 49.4, - 
"MasakhaNEWSClusteringP2P (orm-Ethi)": 25.34, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 85.57, - "MasakhaNEWSClusteringP2P (run-Latn)": 50.75, - "MasakhaNEWSClusteringP2P (sna-Latn)": 41.68, - "MasakhaNEWSClusteringP2P (som-Latn)": 29.02, - "MasakhaNEWSClusteringP2P (swa-Latn)": 21.87, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.93, - "MasakhaNEWSClusteringP2P (xho-Latn)": 28.58, - "MasakhaNEWSClusteringP2P (yor-Latn)": 31.45, - "MasakhaNEWSClusteringS2S (fra)": 40.58, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 45.44, - "MasakhaNEWSClusteringS2S (eng)": 41.09, - "MasakhaNEWSClusteringS2S (fra-Latn)": 40.58, - "MasakhaNEWSClusteringS2S (hau-Latn)": 15.42, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 37.02, - "MasakhaNEWSClusteringS2S (lin-Latn)": 65.14, - "MasakhaNEWSClusteringS2S (lug-Latn)": 44.21, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.79, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 61.48, - "MasakhaNEWSClusteringS2S (run-Latn)": 51.25, - "MasakhaNEWSClusteringS2S (sna-Latn)": 42.74, - "MasakhaNEWSClusteringS2S (som-Latn)": 30.08, - "MasakhaNEWSClusteringS2S (swa-Latn)": 9.55, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 46.04, - "MasakhaNEWSClusteringS2S (xho-Latn)": 27.08, - "MasakhaNEWSClusteringS2S (yor-Latn)": 31.04, - "MedrxivClusteringP2P": 34.39, - "MedrxivClusteringS2S": 31.86, - "RedditClustering": 50.7, - "RedditClusteringP2P": 54.8, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.21, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 9.43, - "StackExchangeClustering": 53.14, - "StackExchangeClusteringP2P": 34.26, - "TwentyNewsgroupsClustering": 46.49 + "Model": "google-gecko.text-embedding-preview-0409", + "ArxivClusteringP2P": 46.27, + "ArxivClusteringS2S": 38.36, + "BiorxivClusteringP2P": 37.87, + "BiorxivClusteringS2S": 35.67, + "MedrxivClusteringP2P": 33.11, + "MedrxivClusteringS2S": 31.54, + "RedditClustering": 65.81, + "RedditClusteringP2P": 66.62, + "StackExchangeClustering": 74.52, + "StackExchangeClusteringP2P": 37.63, + "TwentyNewsgroupsClustering": 54.87 } ] }, "PairClassification": { "ap": [ { - "Model": "all-MiniLM-L6-v2", - "CDSC-E (pol-Latn)": 47.27, - "OpusparcusPC (fr)": 86.53, - "OpusparcusPC (deu-Latn)": 89.91, - "OpusparcusPC (en)": 97.46, - "OpusparcusPC (fin-Latn)": 85.44, - "OpusparcusPC (fra-Latn)": 86.53, - "OpusparcusPC (rus-Cyrl)": 79.28, - "OpusparcusPC (swe-Latn)": 83.78, - "PSC (pol-Latn)": 81.87, - "PawsXPairClassification (fr)": 55.4, - "PawsXPairClassification (deu-Latn)": 51.22, - "PawsXPairClassification (en)": 59.1, - "PawsXPairClassification (spa-Latn)": 52.21, - "PawsXPairClassification (fra-Latn)": 55.41, - "PawsXPairClassification (jpn-Hira)": 48.97, - "PawsXPairClassification (kor-Hang)": 50.53, - "PawsXPairClassification (cmn-Hans)": 53.11, - "SICK-E-PL (pol-Latn)": 47.32, - "SprintDuplicateQuestions": 94.55, - "TERRa (rus-Cyrl)": 45.03, - "TwitterSemEval2015": 67.86, - "TwitterURLCorpus": 84.7 + "Model": "google-gecko.text-embedding-preview-0409", + "SprintDuplicateQuestions": 96.26, + "TwitterSemEval2015": 79.04, + "TwitterURLCorpus": 87.53 } ] }, "Reranking": { "map": [ { - "Model": "all-MiniLM-L6-v2", - "AlloprofReranking": 31.69, - "AlloprofReranking (fra-Latn)": 62.62, - "AskUbuntuDupQuestions": 63.48, - "MMarcoReranking (cmn-Hans)": 4.74, - "MindSmallReranking": 30.8, - "RuBQReranking (rus-Cyrl)": 27.05, - "SciDocsRR": 87.12, - "StackOverflowDupQuestions": 50.76, - "SyntecReranking": 59.57, - "SyntecReranking (fra-Latn)": 67.31, - "T2Reranking (cmn-Hans)": 56.26 + "Model": "google-gecko.text-embedding-preview-0409", + "AskUbuntuDupQuestions": 
64.4, + "MindSmallReranking": 33.07, + "SciDocsRR": 83.59, + "StackOverflowDupQuestions": 54.56 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "all-MiniLM-L6-v2", - "AILACasedocs": 19.72, - "AILAStatutes": 20.52, - "ARCChallenge": 9.48, - "AlloprofRetrieval": 28.41, - "AlloprofRetrieval (fra-Latn)": 28.41, - "AlphaNLI": 28.19, - "ArguAna": 50.17, - "ArguAna-PL (pol-Latn)": 11.5, - "BSARDRetrieval": 0.0, - "BSARDRetrieval (fra-Latn)": 4.8, - "CQADupstackRetrieval": 41.32, - "ClimateFEVER": 20.27, - "CmedqaRetrieval (cmn-Hans)": 2.03, - "CovidRetrieval (cmn-Hans)": 0.8, - "DBPedia": 32.33, - "DuRetrieval (cmn-Hans)": 3.03, - "EcomRetrieval (cmn-Hans)": 3.7, - "FEVER": 51.93, - "FiQA-PL (pol-Latn)": 2.29, - "FiQA2018": 36.87, - "GerDaLIRSmall (deu-Latn)": 2.41, - "HellaSwag": 24.21, - "HotpotQA": 46.51, - "LEMBNarrativeQARetrieval": 18.27, - "LEMBNeedleRetrieval": 20.0, - "LEMBPasskeyRetrieval": 23.25, - "LEMBQMSumRetrieval": 16.32, - "LEMBSummScreenFDRetrieval": 54.8, - "LEMBWikimQARetrieval": 46.23, - "LeCaRDv2 (zho-Hans)": 17.5, - "LegalBenchConsumerContractsQA": 65.6, - "LegalBenchCorporateLobbying": 86.41, - "LegalQuAD (deu-Latn)": 11.81, - "LegalSummarization": 59.0, - "MMarcoRetrieval (cmn-Hans)": 6.21, - "MSMARCO": 36.54, - "MedicalRetrieval (cmn-Hans)": 1.76, - "MintakaRetrieval (fr)": 9.19, - "MintakaRetrieval (ara-Arab)": 2.22, - "MintakaRetrieval (deu-Latn)": 15.43, - "MintakaRetrieval (spa-Latn)": 7.72, - "MintakaRetrieval (fra-Latn)": 9.19, - "MintakaRetrieval (hin-Deva)": 2.65, - "MintakaRetrieval (ita-Latn)": 8.48, - "MintakaRetrieval (jpn-Hira)": 6.7, - "MintakaRetrieval (por-Latn)": 9.76, - "NFCorpus": 31.59, - "NFCorpus-PL (pol-Latn)": 10.62, - "NQ": 43.87, - "PIQA": 25.28, - "Quail": 3.92, - "QuoraRetrieval": 87.56, - "RARbCode": 44.27, - "RARbMath": 68.19, - "RiaNewsRetrieval (rus-Cyrl)": 0.67, - "RuBQRetrieval (rus-Cyrl)": 2.64, - "SCIDOCS": 21.64, - "SCIDOCS-PL (pol-Latn)": 3.75, - "SIQA": 1.56, - "SciFact": 64.51, - "SciFact-PL (pol-Latn)": 16.14, - "SpartQA": 1.65, - "SyntecRetrieval": 60.15, - "SyntecRetrieval (fra-Latn)": 60.15, - "T2Retrieval (cmn-Hans)": 1.6, - "TRECCOVID": 47.25, - "TRECCOVID-PL (pol-Latn)": 8.66, - "TempReasonL1": 1.53, - "TempReasonL2Fact": 17.65, - "TempReasonL2Pure": 0.46, - "TempReasonL3Fact": 14.16, - "TempReasonL3Pure": 6.33, - "Touche2020": 16.9, - "VideoRetrieval (cmn-Hans)": 9.79, - "WinoGrande": 47.33, - "XPQARetrieval (fr)": 51.79, - "XPQARetrieval (ara-Arab_ara-Arab)": 8.03, - "XPQARetrieval (eng-Latn_ara-Arab)": 1.86, - "XPQARetrieval (ara-Arab_eng-Latn)": 6.87, - "XPQARetrieval (deu-Latn_deu-Latn)": 53.25, - "XPQARetrieval (eng-Latn_deu-Latn)": 10.99, - "XPQARetrieval (deu-Latn_eng-Latn)": 27.59, - "XPQARetrieval (spa-Latn_spa-Latn)": 38.87, - "XPQARetrieval (eng-Latn_spa-Latn)": 5.46, - "XPQARetrieval (spa-Latn_eng-Latn)": 22.2, - "XPQARetrieval (fra-Latn_fra-Latn)": 51.79, - "XPQARetrieval (eng-Latn_fra-Latn)": 8.57, - "XPQARetrieval (fra-Latn_eng-Latn)": 31.36, - "XPQARetrieval (hin-Deva_hin-Deva)": 35.3, - "XPQARetrieval (eng-Latn_hin-Deva)": 6.28, - "XPQARetrieval (hin-Deva_eng-Latn)": 6.0, - "XPQARetrieval (ita-Latn_ita-Latn)": 54.57, - "XPQARetrieval (eng-Latn_ita-Latn)": 6.79, - "XPQARetrieval (ita-Latn_eng-Latn)": 24.13, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.23, - "XPQARetrieval (eng-Latn_jpn-Hira)": 4.1, - "XPQARetrieval (jpn-Hira_eng-Latn)": 13.05, - "XPQARetrieval (kor-Hang_kor-Hang)": 10.24, - "XPQARetrieval (eng-Latn_kor-Hang)": 5.72, - "XPQARetrieval (kor-Hang_eng-Latn)": 6.37, - "XPQARetrieval 
(pol-Latn_pol-Latn)": 22.33, - "XPQARetrieval (eng-Latn_pol-Latn)": 7.58, - "XPQARetrieval (pol-Latn_eng-Latn)": 14.43, - "XPQARetrieval (por-Latn_por-Latn)": 31.93, - "XPQARetrieval (eng-Latn_por-Latn)": 5.9, - "XPQARetrieval (por-Latn_eng-Latn)": 20.74, - "XPQARetrieval (tam-Taml_tam-Taml)": 7.43, - "XPQARetrieval (eng-Latn_tam-Taml)": 3.42, - "XPQARetrieval (tam-Taml_eng-Latn)": 2.91, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 19.39, - "XPQARetrieval (eng-Latn_cmn-Hans)": 5.05, - "XPQARetrieval (cmn-Hans_eng-Latn)": 8.77 + "Model": "google-gecko.text-embedding-preview-0409", + "ArguAna": 62.18, + "BrightRetrieval (earth_science)": 34.38, + "BrightRetrieval (leetcode)": 29.64, + "BrightRetrieval (theoremqa_questions)": 21.51, + "BrightRetrieval (aops)": 9.33, + "BrightRetrieval (sustainable_living)": 17.25, + "BrightRetrieval (pony)": 3.59, + "BrightRetrieval (theoremqa_theorems)": 16.77, + "BrightRetrieval (stackoverflow)": 17.93, + "BrightRetrieval (biology)": 22.98, + "BrightRetrieval (robotics)": 15.98, + "BrightRetrieval (economics)": 19.5, + "BrightRetrieval (psychology)": 27.86, + "CQADupstackRetrieval": 48.89, + "ClimateFEVER": 33.21, + "DBPedia": 47.12, + "FEVER": 86.96, + "FiQA2018": 59.24, + "HotpotQA": 71.33, + "MSMARCO": 32.58, + "NFCorpus": 40.33, + "NQ": 61.28, + "QuoraRetrieval": 88.18, + "SCIDOCS": 20.34, + "SciFact": 75.42, + "TRECCOVID": 82.62, + "Touche2020": 25.86 } ] }, "STS": { "spearman": [ { - "Model": "all-MiniLM-L6-v2", - "AFQMC (cmn-Hans)": 8.59, - "ATEC (cmn-Hans)": 13.52, - "BIOSSES": 81.64, - "BQ (cmn-Hans)": 23.84, - "CDSC-R (pol-Latn)": 79.45, - "LCQMC (cmn-Hans)": 23.85, - "PAWSX (cmn-Hans)": 7.21, - "RUParaPhraserSTS (rus-Cyrl)": 43.93, - "RuSTSBenchmarkSTS (rus-Cyrl)": 55.56, - "SICK-R": 77.58, - "SICK-R-PL (pol-Latn)": 52.43, - "SICKFr": 62.48, - "SICKFr (fra-Latn)": 62.48, - "STS12": 72.37, - "STS13": 80.6, - "STS14": 75.59, - "STS15": 85.39, - "STS16": 78.99, - "STS17 (ar-ar)": 50.89, - "STS17 (en-ar)": -4.28, - "STS17 (en-de)": 35.82, - "STS17 (en-en)": 87.59, - "STS17 (en-tr)": 4.5, - "STS17 (es-en)": 16.31, - "STS17 (es-es)": 76.12, - "STS17 (fr-en)": 37.09, - "STS17 (it-en)": 24.45, - "STS17 (ko-ko)": 43.39, - "STS17 (nl-en)": 29.0, - "STS17 (ara-Arab)": 50.89, - "STS17 (spa-Latn_eng-Latn)": 16.31, - "STS17 (kor-Hang)": 43.39, - "STS17 (eng-Latn_tur-Latn)": 4.5, - "STS17 (fra-Latn_eng-Latn)": 37.09, - "STS17 (nld-Latn_eng-Latn)": 29.0, - "STS17 (eng-Latn_ara-Arab)": -4.28, - "STS17 (spa-Latn)": 76.12, - "STS17 (eng-Latn_deu-Latn)": 35.82, - "STS17 (ita-Latn_eng-Latn)": 24.45, - "STS22 (ar)": 22.64, - "STS22 (de)": 31.04, - "STS22 (de-en)": 44.04, - "STS22 (de-fr)": 30.07, - "STS22 (de-pl)": 4.93, - "STS22 (en)": 67.71, - "STS22 (es)": 54.78, - "STS22 (es-en)": 53.42, - "STS22 (es-it)": 44.27, - "STS22 (fr)": 77.0, - "STS22 (fr-pl)": 50.71, - "STS22 (it)": 60.4, - "STS22 (pl)": 26.77, - "STS22 (pl-en)": 32.8, - "STS22 (ru)": 14.72, - "STS22 (tr)": 33.69, - "STS22 (zh)": 44.93, - "STS22 (zh-en)": 41.64, - "STS22 (tur-Latn)": 33.69, - "STS22 (spa-Latn)": 54.78, - "STS22 (ara-Arab)": 22.64, - "STS22 (deu-Latn_pol-Latn)": -4.93, - "STS22 (spa-Latn_eng-Latn)": 53.42, - "STS22 (cmn-Hans_eng-Latn)": 41.64, - "STS22 (rus-Cyrl)": 14.72, - "STS22 (spa-Latn_ita-Latn)": 44.27, - "STS22 (deu-Latn_fra-Latn)": 30.07, - "STS22 (deu-Latn)": 31.04, - "STS22 (fra-Latn_pol-Latn)": 50.71, - "STS22 (pol-Latn)": 26.77, - "STS22 (pol-Latn_eng-Latn)": 32.8, - "STS22 (deu-Latn_eng-Latn)": 44.04, - "STS22 (ita-Latn)": 60.4, - "STS22 (fra-Latn)": 77.0, - "STS22 (cmn-Hans)": 
44.93, - "STSB (cmn-Hans)": 37.8, - "STSBenchmark": 82.03, - "STSBenchmarkMultilingualSTS (fr)": 64.93, - "STSBenchmarkMultilingualSTS (pol-Latn)": 56.42, - "STSBenchmarkMultilingualSTS (por-Latn)": 61.56, - "STSBenchmarkMultilingualSTS (ita-Latn)": 59.24, - "STSBenchmarkMultilingualSTS (fra-Latn)": 64.93, - "STSBenchmarkMultilingualSTS (deu-Latn)": 62.4, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.74, - "STSBenchmarkMultilingualSTS (spa-Latn)": 61.62, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.55, - "STSBenchmarkMultilingualSTS (en)": 82.03, - "STSBenchmarkMultilingualSTS (nld-Latn)": 55.46 + "Model": "google-gecko.text-embedding-preview-0409", + "BIOSSES": 89.46, + "SICK-R": 81.93, + "STS12": 77.59, + "STS13": 90.36, + "STS14": 85.25, + "STS15": 89.66, + "STS16": 87.34, + "STS17 (en-en)": 92.06, + "STS22 (en)": 68.02, + "STSBenchmark": 88.99 } ] }, "Summarization": { "spearman": [ { - "Model": "all-MiniLM-L6-v2", - "SummEval": 30.81, - "SummEvalFr": 28.28, - "SummEvalFr (fra-Latn)": 28.29 + "Model": "google-gecko.text-embedding-preview-0409", + "SummEval": 32.63 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "all-MiniLM-L6-v2" + "Model": "google-gecko.text-embedding-preview-0409", + "Core17InstructionRetrieval": 5.44, + "News21InstructionRetrieval": 3.94, + "Robust04InstructionRetrieval": -2.4 } ] } }, - "sentence-croissant-llm-base": { + "LaBSE": { "BitextMining": { "f1": [ { - "Model": "sentence-croissant-llm-base" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "sentence-croissant-llm-base", - "AmazonReviewsClassification (fr)": 34.79, - "MTOPDomainClassification (fr)": 85.52, - "MTOPIntentClassification (fr)": 63.12, - "MasakhaNEWSClassification (fra)": 79.29, - "MassiveIntentClassification (fr)": 59.41, - "MassiveScenarioClassification (fr)": 65.29 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "sentence-croissant-llm-base", - "AlloProfClusteringP2P": 64.12, - "AlloProfClusteringS2S": 32.52, - "HALClusteringS2S": 23.4, - "MLSUMClusteringP2P": 42.94, - "MLSUMClusteringS2S": 33.91, - "MasakhaNEWSClusteringP2P (fra)": 53.94, - "MasakhaNEWSClusteringS2S (fra)": 41.05 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "sentence-croissant-llm-base", - "OpusparcusPC (fr)": 91.42, - "PawsXPairClassification (fr)": 63.13 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "sentence-croissant-llm-base", - "AlloprofReranking": 53.0, - "SyntecReranking": 82.9 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "sentence-croissant-llm-base", - "AlloprofRetrieval": 29.97, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 21.31, - "SyntecRetrieval": 74.2, - "XPQARetrieval (fr)": 58.57 + "Model": "LaBSE", + "BUCC (de-en)": 99.35, + "BUCC (fr-en)": 98.72, + "BUCC (ru-en)": 97.78, + "BUCC (zh-en)": 99.16, + "BornholmBitextMining (dan-Latn)": 45.63, + "Tatoeba (ber-Tfng_eng-Latn)": 8.4, + "Tatoeba (kab-Latn_eng-Latn)": 4.31, + "Tatoeba (tur-Latn_eng-Latn)": 98.0, + "Tatoeba (gle-Latn_eng-Latn)": 93.8, + "Tatoeba (awa-Deva_eng-Latn)": 71.7, + "Tatoeba (yue-Hant_eng-Latn)": 89.58, + "Tatoeba (tzl-Latn_eng-Latn)": 58.88, + "Tatoeba (tat-Cyrl_eng-Latn)": 85.92, + "Tatoeba (fin-Latn_eng-Latn)": 96.37, + "Tatoeba (cor-Latn_eng-Latn)": 10.11, + "Tatoeba (hye-Armn_eng-Latn)": 94.09, + "Tatoeba (ben-Beng_eng-Latn)": 88.55, + "Tatoeba (epo-Latn_eng-Latn)": 98.2, + "Tatoeba (ile-Latn_eng-Latn)": 85.58, + "Tatoeba (nld-Latn_eng-Latn)": 96.07, + "Tatoeba (mar-Deva_eng-Latn)": 92.65, + "Tatoeba (cmn-Hans_eng-Latn)": 95.1, + "Tatoeba 
(hin-Deva_eng-Latn)": 96.87, + "Tatoeba (tgl-Latn_eng-Latn)": 96.02, + "Tatoeba (mon-Cyrl_eng-Latn)": 95.91, + "Tatoeba (oci-Latn_eng-Latn)": 65.81, + "Tatoeba (dan-Latn_eng-Latn)": 95.71, + "Tatoeba (mkd-Cyrl_eng-Latn)": 93.6, + "Tatoeba (ces-Latn_eng-Latn)": 96.68, + "Tatoeba (fra-Latn_eng-Latn)": 94.86, + "Tatoeba (yid-Hebr_eng-Latn)": 88.79, + "Tatoeba (est-Latn_eng-Latn)": 96.55, + "Tatoeba (ast-Latn_eng-Latn)": 90.68, + "Tatoeba (ind-Latn_eng-Latn)": 93.66, + "Tatoeba (bre-Latn_eng-Latn)": 15.07, + "Tatoeba (eus-Latn_eng-Latn)": 95.01, + "Tatoeba (heb-Hebr_eng-Latn)": 91.53, + "Tatoeba (rus-Cyrl_eng-Latn)": 93.75, + "Tatoeba (lfn-Latn_eng-Latn)": 67.54, + "Tatoeba (jav-Latn_eng-Latn)": 79.77, + "Tatoeba (ukr-Cyrl_eng-Latn)": 93.97, + "Tatoeba (ell-Grek_eng-Latn)": 95.35, + "Tatoeba (nds-Latn_eng-Latn)": 79.42, + "Tatoeba (arz-Arab_eng-Latn)": 76.0, + "Tatoeba (gla-Latn_eng-Latn)": 85.66, + "Tatoeba (cbk-Latn_eng-Latn)": 79.44, + "Tatoeba (max-Deva_eng-Latn)": 63.26, + "Tatoeba (ron-Latn_eng-Latn)": 96.92, + "Tatoeba (ido-Latn_eng-Latn)": 89.42, + "Tatoeba (lvs-Latn_eng-Latn)": 95.88, + "Tatoeba (khm-Khmr_eng-Latn)": 78.37, + "Tatoeba (urd-Arab_eng-Latn)": 93.22, + "Tatoeba (glg-Latn_eng-Latn)": 96.82, + "Tatoeba (gsw-Latn_eng-Latn)": 46.5, + "Tatoeba (swe-Latn_eng-Latn)": 95.63, + "Tatoeba (swh-Latn_eng-Latn)": 84.5, + "Tatoeba (tha-Thai_eng-Latn)": 96.14, + "Tatoeba (tam-Taml_eng-Latn)": 89.0, + "Tatoeba (uzb-Latn_eng-Latn)": 84.23, + "Tatoeba (bul-Cyrl_eng-Latn)": 94.58, + "Tatoeba (kur-Latn_eng-Latn)": 83.59, + "Tatoeba (ina-Latn_eng-Latn)": 95.37, + "Tatoeba (nov-Latn_eng-Latn)": 74.38, + "Tatoeba (afr-Latn_eng-Latn)": 96.18, + "Tatoeba (csb-Latn_eng-Latn)": 52.57, + "Tatoeba (war-Latn_eng-Latn)": 60.29, + "Tatoeba (cha-Latn_eng-Latn)": 31.77, + "Tatoeba (pes-Arab_eng-Latn)": 94.7, + "Tatoeba (kat-Geor_eng-Latn)": 95.02, + "Tatoeba (bos-Latn_eng-Latn)": 94.92, + "Tatoeba (kor-Hang_eng-Latn)": 90.95, + "Tatoeba (slk-Latn_eng-Latn)": 96.5, + "Tatoeba (fry-Latn_eng-Latn)": 89.31, + "Tatoeba (ara-Arab_eng-Latn)": 88.8, + "Tatoeba (sqi-Latn_eng-Latn)": 96.76, + "Tatoeba (ita-Latn_eng-Latn)": 92.72, + "Tatoeba (lat-Latn_eng-Latn)": 80.07, + "Tatoeba (hsb-Latn_eng-Latn)": 67.11, + "Tatoeba (swg-Latn_eng-Latn)": 59.36, + "Tatoeba (srp-Cyrl_eng-Latn)": 94.43, + "Tatoeba (isl-Latn_eng-Latn)": 94.75, + "Tatoeba (hrv-Latn_eng-Latn)": 96.95, + "Tatoeba (wuu-Hans_eng-Latn)": 90.18, + "Tatoeba (mhr-Cyrl_eng-Latn)": 15.74, + "Tatoeba (vie-Latn_eng-Latn)": 97.2, + "Tatoeba (cym-Latn_eng-Latn)": 92.0, + "Tatoeba (dsb-Latn_eng-Latn)": 64.81, + "Tatoeba (hun-Latn_eng-Latn)": 96.55, + "Tatoeba (slv-Latn_eng-Latn)": 96.03, + "Tatoeba (orv-Cyrl_eng-Latn)": 38.93, + "Tatoeba (cat-Latn_eng-Latn)": 95.38, + "Tatoeba (dtp-Latn_eng-Latn)": 10.85, + "Tatoeba (por-Latn_eng-Latn)": 94.14, + "Tatoeba (jpn-Jpan_eng-Latn)": 95.38, + "Tatoeba (ang-Latn_eng-Latn)": 59.28, + "Tatoeba (aze-Latn_eng-Latn)": 94.93, + "Tatoeba (kzj-Latn_eng-Latn)": 11.33, + "Tatoeba (deu-Latn_eng-Latn)": 99.2, + "Tatoeba (uig-Arab_eng-Latn)": 92.4, + "Tatoeba (tel-Telu_eng-Latn)": 97.86, + "Tatoeba (tuk-Latn_eng-Latn)": 75.27, + "Tatoeba (nob-Latn_eng-Latn)": 98.4, + "Tatoeba (nno-Latn_eng-Latn)": 94.48, + "Tatoeba (spa-Latn_eng-Latn)": 98.4, + "Tatoeba (mal-Mlym_eng-Latn)": 98.45, + "Tatoeba (pam-Latn_eng-Latn)": 10.73, + "Tatoeba (xho-Latn_eng-Latn)": 91.55, + "Tatoeba (arq-Arab_eng-Latn)": 42.69, + "Tatoeba (kaz-Cyrl_eng-Latn)": 87.49, + "Tatoeba (bel-Cyrl_eng-Latn)": 95.0, + "Tatoeba (pol-Latn_eng-Latn)": 97.22, + "Tatoeba 
(fao-Latn_eng-Latn)": 87.4, + "Tatoeba (zsm-Latn_eng-Latn)": 95.62, + "Tatoeba (lit-Latn_eng-Latn)": 96.47, + "Tatoeba (ceb-Latn_eng-Latn)": 64.42, + "Tatoeba (pms-Latn_eng-Latn)": 64.57, + "Tatoeba (amh-Ethi_eng-Latn)": 91.47, + "Tatoeba (afr-eng)": 96.18, + "Tatoeba (amh-eng)": 91.47, + "Tatoeba (ang-eng)": 59.28, + "Tatoeba (ara-eng)": 88.8, + "Tatoeba (arq-eng)": 42.69, + "Tatoeba (arz-eng)": 76.0, + "Tatoeba (ast-eng)": 90.68, + "Tatoeba (awa-eng)": 71.7, + "Tatoeba (aze-eng)": 94.93, + "Tatoeba (bel-eng)": 95.0, + "Tatoeba (ben-eng)": 88.55, + "Tatoeba (ber-eng)": 8.4, + "Tatoeba (bos-eng)": 94.92, + "Tatoeba (bre-eng)": 15.07, + "Tatoeba (bul-eng)": 94.58, + "Tatoeba (cat-eng)": 95.38, + "Tatoeba (cbk-eng)": 79.44, + "Tatoeba (ceb-eng)": 64.42, + "Tatoeba (ces-eng)": 96.68, + "Tatoeba (cha-eng)": 31.77, + "Tatoeba (cmn-eng)": 95.1, + "Tatoeba (cor-eng)": 10.11, + "Tatoeba (csb-eng)": 52.57, + "Tatoeba (cym-eng)": 92.0, + "Tatoeba (dan-eng)": 95.71, + "Tatoeba (deu-eng)": 99.2, + "Tatoeba (dsb-eng)": 64.81, + "Tatoeba (dtp-eng)": 10.85, + "Tatoeba (ell-eng)": 95.35, + "Tatoeba (epo-eng)": 98.2, + "Tatoeba (est-eng)": 96.55, + "Tatoeba (eus-eng)": 95.01, + "Tatoeba (fao-eng)": 87.4, + "Tatoeba (fin-eng)": 96.37, + "Tatoeba (fra-eng)": 94.86, + "Tatoeba (fry-eng)": 89.31, + "Tatoeba (gla-eng)": 85.66, + "Tatoeba (gle-eng)": 93.8, + "Tatoeba (glg-eng)": 96.82, + "Tatoeba (gsw-eng)": 46.5, + "Tatoeba (heb-eng)": 91.53, + "Tatoeba (hin-eng)": 96.87, + "Tatoeba (hrv-eng)": 96.95, + "Tatoeba (hsb-eng)": 67.11, + "Tatoeba (hun-eng)": 96.55, + "Tatoeba (hye-eng)": 94.09, + "Tatoeba (ido-eng)": 89.42, + "Tatoeba (ile-eng)": 85.58, + "Tatoeba (ina-eng)": 95.37, + "Tatoeba (ind-eng)": 93.66, + "Tatoeba (isl-eng)": 94.75, + "Tatoeba (ita-eng)": 92.72, + "Tatoeba (jav-eng)": 79.77, + "Tatoeba (jpn-eng)": 95.38, + "Tatoeba (kab-eng)": 4.31, + "Tatoeba (kat-eng)": 95.02, + "Tatoeba (kaz-eng)": 87.49, + "Tatoeba (khm-eng)": 78.37, + "Tatoeba (kor-eng)": 90.95, + "Tatoeba (kur-eng)": 83.59, + "Tatoeba (kzj-eng)": 11.33, + "Tatoeba (lat-eng)": 80.07, + "Tatoeba (lfn-eng)": 67.54, + "Tatoeba (lit-eng)": 96.47, + "Tatoeba (lvs-eng)": 95.88, + "Tatoeba (mal-eng)": 98.45, + "Tatoeba (mar-eng)": 92.65, + "Tatoeba (max-eng)": 63.26, + "Tatoeba (mhr-eng)": 15.74, + "Tatoeba (mkd-eng)": 93.6, + "Tatoeba (mon-eng)": 95.91, + "Tatoeba (nds-eng)": 79.42, + "Tatoeba (nld-eng)": 96.07, + "Tatoeba (nno-eng)": 94.48, + "Tatoeba (nob-eng)": 98.4, + "Tatoeba (nov-eng)": 74.38, + "Tatoeba (oci-eng)": 65.81, + "Tatoeba (orv-eng)": 38.93, + "Tatoeba (pam-eng)": 10.73, + "Tatoeba (pes-eng)": 94.7, + "Tatoeba (pms-eng)": 64.57, + "Tatoeba (pol-eng)": 97.22, + "Tatoeba (por-eng)": 94.14, + "Tatoeba (ron-eng)": 96.92, + "Tatoeba (rus-eng)": 93.75, + "Tatoeba (slk-eng)": 96.5, + "Tatoeba (slv-eng)": 96.03, + "Tatoeba (spa-eng)": 98.4, + "Tatoeba (sqi-eng)": 96.76, + "Tatoeba (srp-eng)": 94.43, + "Tatoeba (swe-eng)": 95.63, + "Tatoeba (swg-eng)": 59.36, + "Tatoeba (swh-eng)": 84.5, + "Tatoeba (tam-eng)": 89.0, + "Tatoeba (tat-eng)": 85.92, + "Tatoeba (tel-eng)": 97.86, + "Tatoeba (tgl-eng)": 96.02, + "Tatoeba (tha-eng)": 96.14, + "Tatoeba (tuk-eng)": 75.27, + "Tatoeba (tur-eng)": 98.0, + "Tatoeba (tzl-eng)": 58.88, + "Tatoeba (uig-eng)": 92.4, + "Tatoeba (ukr-eng)": 93.97, + "Tatoeba (urd-eng)": 93.22, + "Tatoeba (uzb-eng)": 84.23, + "Tatoeba (vie-eng)": 97.2, + "Tatoeba (war-eng)": 60.29, + "Tatoeba (wuu-eng)": 90.18, + "Tatoeba (xho-eng)": 91.55, + "Tatoeba (yid-eng)": 88.79, + "Tatoeba (yue-eng)": 89.58, + "Tatoeba 
(zsm-eng)": 95.62 } ] }, - "STS": { - "spearman": [ - { - "Model": "sentence-croissant-llm-base", - "SICKFr": 69.6, - "STS22 (fr)": 78.77, - "STSBenchmarkMultilingualSTS (fr)": 79.23 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "sentence-croissant-llm-base", - "SummEvalFr": 29.04 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "sentence-croissant-llm-base" - } - ] - } - }, - "all-mpnet-base-v2-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "all-mpnet-base-v2-instruct", - "ARCChallenge": 10.35, - "AlphaNLI": 1.96, - "HellaSwag": 13.01, - "PIQA": 27.18, - "Quail": 3.02, - "RARbCode": 48.95, - "RARbMath": 69.21, - "SIQA": 1.29, - "SpartQA": 1.01, - "TempReasonL1": 1.52, - "TempReasonL2Fact": 7.28, - "TempReasonL2Pure": 1.03, - "TempReasonL3Fact": 7.03, - "TempReasonL3Pure": 5.16, - "WinoGrande": 9.66 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "all-mpnet-base-v2-instruct" - } - ] - } - }, - "bge-large-zh-v1.5": { - "BitextMining": { - "f1": [ - { - "Model": "bge-large-zh-v1.5" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-large-zh-v1.5", - "AmazonReviewsClassification (zh)": 41.38, - "IFlyTek": 48.74, - "JDReview": 85.14, - "MassiveIntentClassification (zh-CN)": 68.84, - "MassiveScenarioClassification (zh-CN)": 74.7, - "MultilingualSentiment": 72.97, - "OnlineShopping": 91.43, - "TNews": 52.1, - "Waimai": 86.9 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-large-zh-v1.5", - "CLSClusteringP2P": 41.44, - "CLSClusteringS2S": 38.33, - "ThuNewsClusteringP2P": 59.61, - "ThuNewsClusteringS2S": 56.58 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-large-zh-v1.5", - "Cmnli": 85.27, - "Ocnli": 77.94 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-large-zh-v1.5", - "CMedQAv1": 83.45, - "CMedQAv2": 85.44, - "MMarcoReranking": 28.74, - "T2Reranking": 65.74 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-large-zh-v1.5", - "CmedqaRetrieval": 42.57, - "CovidRetrieval": 73.35, - "DuRetrieval": 86.32, - "EcomRetrieval": 65.33, - "MMarcoRetrieval": 79.23, - "MedicalRetrieval": 59.59, - "T2Retrieval": 83.99, - "VideoRetrieval": 73.32 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-large-zh-v1.5", - "AFQMC": 44.36, - "ATEC": 49.54, - "BQ": 62.94, - "LCQMC": 74.33, - "PAWSX": 33.92, - "QBQTC": 37.29, - "STS22 (zh)": 68.94, - "STSB": 78.7 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bge-large-zh-v1.5" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-large-zh-v1.5" - } - ] - } - }, - "text-embedding-ada-002": { - "BitextMining": { - "f1": [ - { - "Model": "text-embedding-ada-002" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-embedding-ada-002", - "AmazonCounterfactualClassification (en)": 75.94, - "AmazonPolarityClassification": 86.72, - 
"AmazonReviewsClassification (zh)": 38.3, - "AmazonReviewsClassification (en)": 44.78, - "AmazonReviewsClassification (fr)": 43.76, - "Banking77Classification": 80.66, - "EmotionClassification": 48.74, - "IFlyTek": 44.62, - "ImdbClassification": 77.98, - "JDReview": 74.6, - "MTOPDomainClassification (en)": 92.13, - "MTOPDomainClassification (fr)": 89.38, - "MTOPIntentClassification (en)": 64.68, - "MTOPIntentClassification (fr)": 64.45, - "MasakhaNEWSClassification (fra)": 81.52, - "MassiveIntentClassification (zh-CN)": 64.81, - "MassiveIntentClassification (en)": 70.15, - "MassiveIntentClassification (fr)": 65.42, - "MassiveScenarioClassification (zh-CN)": 71.4, - "MassiveScenarioClassification (en)": 75.33, - "MassiveScenarioClassification (fr)": 71.11, - "MultilingualSentiment": 67.99, - "OnlineShopping": 88.94, - "TNews": 45.77, - "ToxicConversationsClassification": 72.29, - "TweetSentimentExtractionClassification": 61.81, - "Waimai": 82.37 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-embedding-ada-002", - "AlloProfClusteringP2P": 64.83, - "AlloProfClusteringS2S": 53.52, - "ArxivClusteringP2P": 45.01, - "ArxivClusteringS2S": 36.85, - "BiorxivClusteringP2P": 36.66, - "BiorxivClusteringS2S": 34.21, - "CLSClusteringP2P": 38.26, - "CLSClusteringS2S": 35.91, - "HALClusteringS2S": 26.18, - "MLSUMClusteringP2P": 44.59, - "MLSUMClusteringS2S": 41.67, - "MasakhaNEWSClusteringP2P (fra)": 68.35, - "MasakhaNEWSClusteringS2S (fra)": 48.58, - "MedrxivClusteringP2P": 32.6, - "MedrxivClusteringS2S": 30.8, - "RedditClustering": 61.42, - "RedditClusteringP2P": 64.13, - "StackExchangeClustering": 72.22, - "StackExchangeClusteringP2P": 38.49, - "ThuNewsClusteringP2P": 58.71, - "ThuNewsClusteringS2S": 49.86, - "TwentyNewsgroupsClustering": 52.56 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-embedding-ada-002", - "Cmnli": 76.03, - "Ocnli": 63.08, - "OpusparcusPC (fr)": 94.12, - "PawsXPairClassification (fr)": 60.16, - "SprintDuplicateQuestions": 92.17, - "TwitterSemEval2015": 75.28, - "TwitterURLCorpus": 87.22 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-embedding-ada-002", - "AskUbuntuDupQuestions": 62.05, - "CMedQAv1": 63.08, - "CMedQAv2": 64.02, - "MMarcoReranking": 23.39, - "MindSmallReranking": 31.45, - "SciDocsRR": 81.22, - "StackOverflowDupQuestions": 50.54, - "SyntecReranking": 89.87, - "T2Reranking": 66.65 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-embedding-ada-002", - "ARCChallenge": 13.3, - "AlloprofRetrieval": 51.64, - "AlphaNLI": 25.65, - "ArguAna": 57.44, - "BSARDRetrieval": 0.61, - "CQADupstackRetrieval": 41.69, - "ClimateFEVER": 21.64, - "CmedqaRetrieval": 22.36, - "CovidRetrieval": 57.21, - "DBPedia": 39.39, - "DuRetrieval": 71.17, - "EcomRetrieval": 44.49, - "FEVER": 74.99, - "FiQA2018": 44.41, - "HellaSwag": 29.29, - "HotpotQA": 60.9, - "MMarcoRetrieval": 69.86, - "MSMARCO": 40.91, - "MedicalRetrieval": 37.92, - "MintakaRetrieval (fr)": 29.94, - "NFCorpus": 36.97, - "NQ": 51.58, - "PIQA": 31.02, - "Quail": 5.83, - "QuoraRetrieval": 87.6, - "RARbCode": 83.39, - "RARbMath": 73.21, - "SCIDOCS": 18.36, - "SIQA": 3.14, - "SciFact": 72.75, - "SpartQA": 4.23, - "SyntecRetrieval": 85.97, - "T2Retrieval": 69.14, - "TRECCOVID": 68.47, - "TempReasonL1": 1.68, - "TempReasonL2Fact": 19.93, - "TempReasonL2Pure": 2.6, - "TempReasonL3Fact": 18.02, - "TempReasonL3Pure": 7.58, - "Touche2020": 21.61, - "VideoRetrieval": 43.85, - "WinoGrande": 19.65, - "XPQARetrieval (fr)": 73.0 - } - ] - }, - "STS": { - "spearman": [ 
- { - "Model": "text-embedding-ada-002", - "AFQMC": 23.88, - "ATEC": 29.25, - "BIOSSES": 86.35, - "BQ": 45.33, - "LCQMC": 68.41, - "PAWSX": 16.55, - "QBQTC": 30.27, - "SICK-R": 80.6, - "SICKFr": 76.28, - "STS12": 69.8, - "STS13": 83.27, - "STS14": 76.09, - "STS15": 86.12, - "STS16": 85.96, - "STS17 (en-en)": 90.25, - "STS22 (zh)": 62.53, - "STS22 (en)": 68.12, - "STS22 (tr)": 64.5, - "STS22 (fr)": 81.09, - "STSB": 70.61, - "STSBenchmark": 83.17, - "STSBenchmarkMultilingualSTS (fr)": 77.55 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-embedding-ada-002", - "SummEval": 30.8, - "SummEvalFr": 30.5 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-embedding-ada-002" - } - ] - } - }, - "LLM2Vec-Meta-Llama-3-supervised": { - "BitextMining": { - "f1": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "AmazonCounterfactualClassification (en)": 79.94, - "AmazonPolarityClassification": 86.07, - "AmazonReviewsClassification (en)": 46.84, - "Banking77Classification": 88.05, - "EmotionClassification": 51.2, - "ImdbClassification": 82.94, - "MTOPDomainClassification (en)": 96.14, - "MTOPIntentClassification (en)": 86.11, - "MassiveIntentClassification (en)": 79.8, - "MassiveScenarioClassification (en)": 81.52, - "ToxicConversationsClassification": 70.59, - "TweetSentimentExtractionClassification": 61.9 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "ArxivClusteringP2P": 44.27, - "ArxivClusteringS2S": 46.85, - "BiorxivClusteringP2P": 32.35, - "BiorxivClusteringS2S": 36.7, - "MedrxivClusteringP2P": 30.71, - "MedrxivClusteringS2S": 32.96, - "RedditClustering": 61.72, - "RedditClusteringP2P": 63.98, - "StackExchangeClustering": 72.74, - "StackExchangeClusteringP2P": 32.26, - "TwentyNewsgroupsClustering": 56.41 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "SprintDuplicateQuestions": 95.09, - "TwitterSemEval2015": 81.73, - "TwitterURLCorpus": 86.56 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "AskUbuntuDupQuestions": 65.19, - "MindSmallReranking": 32.67, - "SciDocsRR": 86.05, - "StackOverflowDupQuestions": 54.82 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "ArguAna": 62.78, - "CQADupstackRetrieval": 48.25, - "ClimateFEVER": 34.27, - "DBPedia": 48.34, - "FEVER": 90.2, - "FiQA2018": 55.33, - "HotpotQA": 71.76, - "MSMARCO": 43.24, - "NFCorpus": 41.83, - "NQ": 64.21, - "QuoraRetrieval": 87.16, - "SCIDOCS": 22.96, - "SciFact": 78.22, - "TRECCOVID": 80.34, - "Touche2020": 20.5 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "BIOSSES": 84.92, - "SICK-R": 83.94, - "STS12": 79.27, - "STS13": 84.83, - "STS14": 82.94, - "STS15": 88.09, - "STS16": 86.54, - "STS17 (en-en)": 89.58, - "STS22 (en)": 67.67, - "STSBenchmark": 88.05 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised", - "SummEval": 30.94 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LLM2Vec-Meta-Llama-3-supervised" - } - ] - } - }, - "gtr-t5-xl": { - "BitextMining": { - "f1": [ - { - "Model": "gtr-t5-xl", - "BUCC (de-en)": 90.99, - "BUCC (fr-en)": 88.55, - "BUCC (ru-en)": 2.07, - "BUCC (zh-en)": 1.49, - "Tatoeba (afr-eng)": 33.47, - "Tatoeba (amh-eng)": 0.01, - "Tatoeba (ang-eng)": 30.74, - 
"Tatoeba (ara-eng)": 0.47, - "Tatoeba (arq-eng)": 0.34, - "Tatoeba (arz-eng)": 0.14, - "Tatoeba (ast-eng)": 51.74, - "Tatoeba (awa-eng)": 0.49, - "Tatoeba (aze-eng)": 7.43, - "Tatoeba (bel-eng)": 3.45, - "Tatoeba (ben-eng)": 0.06, - "Tatoeba (ber-eng)": 5.79, - "Tatoeba (bos-eng)": 17.43, - "Tatoeba (bre-eng)": 5.69, - "Tatoeba (bul-eng)": 7.55, - "Tatoeba (cat-eng)": 48.06, - "Tatoeba (cbk-eng)": 54.56, - "Tatoeba (ceb-eng)": 8.72, - "Tatoeba (ces-eng)": 8.76, - "Tatoeba (cha-eng)": 27.56, - "Tatoeba (cmn-eng)": 2.26, - "Tatoeba (cor-eng)": 3.69, - "Tatoeba (csb-eng)": 13.18, - "Tatoeba (cym-eng)": 6.97, - "Tatoeba (dan-eng)": 47.36, - "Tatoeba (deu-eng)": 91.54, - "Tatoeba (dsb-eng)": 13.2, - "Tatoeba (dtp-eng)": 4.54, - "Tatoeba (ell-eng)": 0.55, - "Tatoeba (epo-eng)": 27.86, - "Tatoeba (est-eng)": 5.13, - "Tatoeba (eus-eng)": 10.23, - "Tatoeba (fao-eng)": 21.44, - "Tatoeba (fin-eng)": 6.62, - "Tatoeba (fra-eng)": 79.66, - "Tatoeba (fry-eng)": 32.92, - "Tatoeba (gla-eng)": 2.87, - "Tatoeba (gle-eng)": 3.26, - "Tatoeba (glg-eng)": 63.81, - "Tatoeba (gsw-eng)": 29.71, - "Tatoeba (heb-eng)": 0.33, - "Tatoeba (hin-eng)": 0.25, - "Tatoeba (hrv-eng)": 17.16, - "Tatoeba (hsb-eng)": 12.02, - "Tatoeba (hun-eng)": 7.21, - "Tatoeba (hye-eng)": 0.78, - "Tatoeba (ido-eng)": 40.83, - "Tatoeba (ile-eng)": 54.95, - "Tatoeba (ina-eng)": 72.28, - "Tatoeba (ind-eng)": 30.95, - "Tatoeba (isl-eng)": 11.29, - "Tatoeba (ita-eng)": 73.83, - "Tatoeba (jav-eng)": 8.66, - "Tatoeba (jpn-eng)": 0.61, - "Tatoeba (kab-eng)": 1.78, - "Tatoeba (kat-eng)": 0.79, - "Tatoeba (kaz-eng)": 0.95, - "Tatoeba (khm-eng)": 0.49, - "Tatoeba (kor-eng)": 1.87, - "Tatoeba (kur-eng)": 10.91, - "Tatoeba (kzj-eng)": 5.72, - "Tatoeba (lat-eng)": 18.24, - "Tatoeba (lfn-eng)": 43.49, - "Tatoeba (lit-eng)": 7.13, - "Tatoeba (lvs-eng)": 7.04, - "Tatoeba (mal-eng)": 0.44, - "Tatoeba (mar-eng)": 0.03, - "Tatoeba (max-eng)": 18.99, - "Tatoeba (mhr-eng)": 1.11, - "Tatoeba (mkd-eng)": 2.49, - "Tatoeba (mon-eng)": 2.01, - "Tatoeba (nds-eng)": 39.96, - "Tatoeba (nld-eng)": 58.86, - "Tatoeba (nno-eng)": 29.07, - "Tatoeba (nob-eng)": 40.25, - "Tatoeba (nov-eng)": 50.19, - "Tatoeba (oci-eng)": 30.72, - "Tatoeba (orv-eng)": 0.85, - "Tatoeba (pam-eng)": 7.21, - "Tatoeba (pes-eng)": 0.53, - "Tatoeba (pms-eng)": 31.07, - "Tatoeba (pol-eng)": 18.06, - "Tatoeba (por-eng)": 81.92, - "Tatoeba (ron-eng)": 62.6, - "Tatoeba (rus-eng)": 22.24, - "Tatoeba (slk-eng)": 10.59, - "Tatoeba (slv-eng)": 11.4, - "Tatoeba (spa-eng)": 85.78, - "Tatoeba (sqi-eng)": 14.92, - "Tatoeba (srp-eng)": 9.87, - "Tatoeba (swe-eng)": 55.08, - "Tatoeba (swg-eng)": 32.66, - "Tatoeba (swh-eng)": 7.64, - "Tatoeba (tam-eng)": 0.49, - "Tatoeba (tat-eng)": 1.28, - "Tatoeba (tel-eng)": 0.45, - "Tatoeba (tgl-eng)": 23.63, - "Tatoeba (tha-eng)": 0.61, - "Tatoeba (tuk-eng)": 5.71, - "Tatoeba (tur-eng)": 8.25, - "Tatoeba (tzl-eng)": 28.4, - "Tatoeba (uig-eng)": 0.57, - "Tatoeba (ukr-eng)": 5.69, - "Tatoeba (urd-eng)": 0.0, - "Tatoeba (uzb-eng)": 4.19, - "Tatoeba (vie-eng)": 9.07, - "Tatoeba (war-eng)": 12.31, - "Tatoeba (wuu-eng)": 1.38, - "Tatoeba (xho-eng)": 7.6, - "Tatoeba (yid-eng)": 0.41, - "Tatoeba (yue-eng)": 1.31, - "Tatoeba (zsm-eng)": 29.74 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gtr-t5-xl", - "AmazonCounterfactualClassification (de)": 59.79, - "AmazonCounterfactualClassification (en)": 68.6, - "AmazonCounterfactualClassification (en-ext)": 69.03, - "AmazonCounterfactualClassification (ja)": 50.59, - "AmazonPolarityClassification": 74.58, - 
"AmazonReviewsClassification (de)": 35.06, - "AmazonReviewsClassification (en)": 38.2, - "AmazonReviewsClassification (es)": 37.18, - "AmazonReviewsClassification (fr)": 35.48, - "AmazonReviewsClassification (ja)": 22.24, - "AmazonReviewsClassification (zh)": 21.89, - "Banking77Classification": 82.22, - "EmotionClassification": 45.54, - "ImdbClassification": 68.15, - "MTOPDomainClassification (de)": 85.42, - "MTOPDomainClassification (en)": 93.6, - "MTOPDomainClassification (es)": 88.2, - "MTOPDomainClassification (fr)": 85.05, - "MTOPDomainClassification (hi)": 21.74, - "MTOPDomainClassification (th)": 15.87, - "MTOPIntentClassification (de)": 55.75, - "MTOPIntentClassification (en)": 65.93, - "MTOPIntentClassification (es)": 57.73, - "MTOPIntentClassification (fr)": 51.07, - "MTOPIntentClassification (hi)": 3.19, - "MTOPIntentClassification (th)": 5.55, - "MassiveIntentClassification (af)": 42.6, - "MassiveIntentClassification (am)": 2.12, - "MassiveIntentClassification (ar)": 4.64, - "MassiveIntentClassification (az)": 35.05, - "MassiveIntentClassification (bn)": 2.84, - "MassiveIntentClassification (cy)": 36.19, - "MassiveIntentClassification (da)": 48.42, - "MassiveIntentClassification (de)": 55.49, - "MassiveIntentClassification (el)": 10.14, - "MassiveIntentClassification (en)": 70.23, - "MassiveIntentClassification (es)": 56.72, - "MassiveIntentClassification (fa)": 3.54, - "MassiveIntentClassification (fi)": 37.13, - "MassiveIntentClassification (fr)": 57.67, - "MassiveIntentClassification (he)": 2.56, - "MassiveIntentClassification (hi)": 3.24, - "MassiveIntentClassification (hu)": 34.22, - "MassiveIntentClassification (hy)": 3.01, - "MassiveIntentClassification (id)": 46.54, - "MassiveIntentClassification (is)": 34.77, - "MassiveIntentClassification (it)": 54.13, - "MassiveIntentClassification (ja)": 4.27, - "MassiveIntentClassification (jv)": 36.97, - "MassiveIntentClassification (ka)": 2.72, - "MassiveIntentClassification (km)": 5.35, - "MassiveIntentClassification (kn)": 3.17, - "MassiveIntentClassification (ko)": 2.64, - "MassiveIntentClassification (lv)": 36.32, - "MassiveIntentClassification (ml)": 3.18, - "MassiveIntentClassification (mn)": 22.85, - "MassiveIntentClassification (ms)": 42.87, - "MassiveIntentClassification (my)": 4.04, - "MassiveIntentClassification (nb)": 45.87, - "MassiveIntentClassification (nl)": 49.53, - "MassiveIntentClassification (pl)": 42.64, - "MassiveIntentClassification (pt)": 57.03, - "MassiveIntentClassification (ro)": 49.95, - "MassiveIntentClassification (ru)": 36.58, - "MassiveIntentClassification (sl)": 39.44, - "MassiveIntentClassification (sq)": 41.78, - "MassiveIntentClassification (sv)": 47.95, - "MassiveIntentClassification (sw)": 35.85, - "MassiveIntentClassification (ta)": 2.32, - "MassiveIntentClassification (te)": 2.2, - "MassiveIntentClassification (th)": 3.74, - "MassiveIntentClassification (tl)": 43.12, - "MassiveIntentClassification (tr)": 35.24, - "MassiveIntentClassification (ur)": 3.0, - "MassiveIntentClassification (vi)": 30.01, - "MassiveIntentClassification (zh-CN)": 1.72, - "MassiveIntentClassification (zh-TW)": 3.35, - "MassiveScenarioClassification (af)": 52.54, - "MassiveScenarioClassification (am)": 6.3, - "MassiveScenarioClassification (ar)": 11.96, - "MassiveScenarioClassification (az)": 40.17, - "MassiveScenarioClassification (bn)": 8.29, - "MassiveScenarioClassification (cy)": 42.24, - "MassiveScenarioClassification (da)": 57.28, - "MassiveScenarioClassification (de)": 68.09, - "MassiveScenarioClassification 
(el)": 16.66, - "MassiveScenarioClassification (en)": 75.94, - "MassiveScenarioClassification (es)": 64.32, - "MassiveScenarioClassification (fa)": 6.9, - "MassiveScenarioClassification (fi)": 43.96, - "MassiveScenarioClassification (fr)": 66.72, - "MassiveScenarioClassification (he)": 7.51, - "MassiveScenarioClassification (hi)": 7.82, - "MassiveScenarioClassification (hu)": 42.16, - "MassiveScenarioClassification (hy)": 9.33, - "MassiveScenarioClassification (id)": 53.54, - "MassiveScenarioClassification (is)": 42.84, - "MassiveScenarioClassification (it)": 62.44, - "MassiveScenarioClassification (ja)": 7.29, - "MassiveScenarioClassification (jv)": 43.13, - "MassiveScenarioClassification (ka)": 7.63, - "MassiveScenarioClassification (km)": 9.08, - "MassiveScenarioClassification (kn)": 8.1, - "MassiveScenarioClassification (ko)": 6.35, - "MassiveScenarioClassification (lv)": 40.24, - "MassiveScenarioClassification (ml)": 7.65, - "MassiveScenarioClassification (mn)": 27.98, - "MassiveScenarioClassification (ms)": 52.41, - "MassiveScenarioClassification (my)": 9.21, - "MassiveScenarioClassification (nb)": 54.44, - "MassiveScenarioClassification (nl)": 60.35, - "MassiveScenarioClassification (pl)": 49.97, - "MassiveScenarioClassification (pt)": 62.78, - "MassiveScenarioClassification (ro)": 59.62, - "MassiveScenarioClassification (ru)": 43.44, - "MassiveScenarioClassification (sl)": 44.79, - "MassiveScenarioClassification (sq)": 50.84, - "MassiveScenarioClassification (sv)": 58.21, - "MassiveScenarioClassification (sw)": 44.63, - "MassiveScenarioClassification (ta)": 7.95, - "MassiveScenarioClassification (te)": 7.5, - "MassiveScenarioClassification (th)": 8.79, - "MassiveScenarioClassification (tl)": 53.54, - "MassiveScenarioClassification (tr)": 42.47, - "MassiveScenarioClassification (ur)": 9.58, - "MassiveScenarioClassification (vi)": 34.68, - "MassiveScenarioClassification (zh-CN)": 5.21, - "MassiveScenarioClassification (zh-TW)": 8.77, - "ToxicConversationsClassification": 67.56, - "TweetSentimentExtractionClassification": 54.77 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gtr-t5-xl", - "ArxivClusteringP2P": 37.9, - "ArxivClusteringS2S": 30.45, - "BiorxivClusteringP2P": 30.52, - "BiorxivClusteringS2S": 26.06, - "MedrxivClusteringP2P": 28.69, - "MedrxivClusteringS2S": 26.69, - "RedditClustering": 61.34, - "RedditClusteringP2P": 61.11, - "StackExchangeClustering": 69.95, - "StackExchangeClusteringP2P": 32.73, - "TwentyNewsgroupsClustering": 51.15 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "gtr-t5-xl", - "SprintDuplicateQuestions": 95.45, - "TwitterSemEval2015": 77.81, - "TwitterURLCorpus": 85.14 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "gtr-t5-xl", - "AskUbuntuDupQuestions": 63.08, - "MindSmallReranking": 31.5, - "SciDocsRR": 76.49, - "StackOverflowDupQuestions": 52.79 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "gtr-t5-xl", - "ArguAna": 52.81, - "CQADupstackRetrieval": 37.35, - "ClimateFEVER": 27.01, - "DBPedia": 39.74, - "FEVER": 72.18, - "FiQA2018": 44.19, - "HotpotQA": 58.91, - "MSMARCO": 43.52, - "NFCorpus": 33.34, - "NQ": 56.16, - "QuoraRetrieval": 88.91, - "SCIDOCS": 15.71, - "SciFact": 64.2, - "TRECCOVID": 60.09, - "Touche2020": 25.26 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "gtr-t5-xl", - "BIOSSES": 78.94, - "SICK-R": 73.63, - "STS12": 69.11, - "STS13": 81.82, - "STS14": 77.07, - "STS15": 86.01, - "STS16": 82.23, - "STS17 (ar-ar)": 9.06, - "STS17 (en-ar)": -3.22, - "STS17 (en-de)": 70.38, - 
"STS17 (en-en)": 84.9, - "STS17 (en-tr)": 17.17, - "STS17 (es-en)": 60.24, - "STS17 (es-es)": 81.93, - "STS17 (fr-en)": 62.17, - "STS17 (it-en)": 59.11, - "STS17 (ko-ko)": 8.9, - "STS17 (nl-en)": 56.91, - "STS22 (ar)": 37.66, - "STS22 (de)": 50.58, - "STS22 (de-en)": 53.63, - "STS22 (de-fr)": 55.72, - "STS22 (de-pl)": 27.99, - "STS22 (en)": 66.61, - "STS22 (es)": 59.14, - "STS22 (es-en)": 69.99, - "STS22 (es-it)": 60.94, - "STS22 (fr)": 79.43, - "STS22 (fr-pl)": 61.98, - "STS22 (it)": 67.14, - "STS22 (pl)": 33.74, - "STS22 (pl-en)": 60.18, - "STS22 (ru)": 32.69, - "STS22 (tr)": 55.79, - "STS22 (zh)": 31.16, - "STS22 (zh-en)": 28.85, - "STSBenchmark": 77.65 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "gtr-t5-xl", - "SummEval": 30.21 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "gtr-t5-xl" - } - ] - } - }, - "luotuo-bert-medium": { - "BitextMining": { - "f1": [ - { - "Model": "luotuo-bert-medium" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "luotuo-bert-medium", - "AmazonReviewsClassification (zh)": 34.46, - "IFlyTek": 41.75, - "JDReview": 79.68, - "MassiveIntentClassification (zh-CN)": 57.47, - "MassiveScenarioClassification (zh-CN)": 65.32, - "MultilingualSentiment": 61.21, - "OnlineShopping": 84.3, - "TNews": 45.22, - "Waimai": 79.57 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "luotuo-bert-medium", - "CLSClusteringP2P": 37.01, - "CLSClusteringS2S": 33.46, - "ThuNewsClusteringP2P": 58.83, - "ThuNewsClusteringS2S": 48.26 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "luotuo-bert-medium", - "Cmnli": 72.55, - "Ocnli": 60.7 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "luotuo-bert-medium", - "CMedQAv1": 57.82, - "CMedQAv2": 58.88, - "MMarcoReranking": 14.55, - "T2Reranking": 65.76 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "luotuo-bert-medium", - "CmedqaRetrieval": 18.04, - "CovidRetrieval": 55.48, - "DuRetrieval": 59.36, - "EcomRetrieval": 40.48, - "MMarcoRetrieval": 55.31, - "MedicalRetrieval": 29.8, - "T2Retrieval": 58.67, - "VideoRetrieval": 38.04 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "luotuo-bert-medium", - "AFQMC": 22.24, - "ATEC": 30.84, - "BQ": 43.33, - "LCQMC": 66.74, - "PAWSX": 12.31, - "QBQTC": 27.2, - "STS22 (zh)": 66.4, - "STSB": 73.22 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "luotuo-bert-medium" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "luotuo-bert-medium" - } - ] - } - }, - "contriever-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "contriever-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "contriever-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "contriever-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "contriever-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "contriever-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "contriever-instruct", - "ARCChallenge": 7.63, - "AlphaNLI": 27.09, - "PIQA": 21.73, - "Quail": 4.92, - "RARbCode": 7.12, - "RARbMath": 21.83, - "SIQA": 0.88, - "SpartQA": 10.56, - "TempReasonL1": 1.8, - "TempReasonL2Fact": 22.03, - "TempReasonL2Pure": 0.94, - "TempReasonL3Fact": 20.82, - "TempReasonL3Pure": 7.15, - "WinoGrande": 26.3 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "contriever-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "contriever-instruct" - } - ] - }, - 
"InstructionRetrieval": { - "p-MRR": [ - { - "Model": "contriever-instruct" - } - ] - } - }, - "text-search-babbage-001": { - "BitextMining": { - "f1": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-search-babbage-001", - "ArguAna": 49.2, - "ClimateFEVER": 19.9, - "FEVER": 77.0, - "FiQA2018": 42.2, - "HotpotQA": 63.1, - "NFCorpus": 36.7, - "QuoraRetrieval": 69.7, - "SciFact": 70.4, - "TRECCOVID": 58.5, - "Touche2020": 29.7 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-search-babbage-001" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-search-babbage-001" - } - ] - } - }, - "Cohere-embed-multilingual-light-v3.0": { - "BitextMining": { - "f1": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "AmazonReviewsClassification (fr)": 38.6, - "MTOPDomainClassification (fr)": 80.79, - "MTOPIntentClassification (fr)": 50.01, - "MasakhaNEWSClassification (fra)": 82.58, - "MassiveIntentClassification (fr)": 56.31, - "MassiveScenarioClassification (fr)": 59.5 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "AlloProfClusteringP2P": 61.96, - "AlloProfClusteringS2S": 31.36, - "HALClusteringS2S": 17.31, - "MLSUMClusteringP2P": 42.8, - "MLSUMClusteringS2S": 32.72, - "MasakhaNEWSClusteringP2P (fra)": 56.81, - "MasakhaNEWSClusteringS2S (fra)": 29.41 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "OpusparcusPC (fr)": 90.92, - "PawsXPairClassification (fr)": 57.32 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "AlloprofReranking": 51.6, - "SyntecReranking": 88.03 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "AlloprofRetrieval": 35.39, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 23.0, - "SyntecRetrieval": 76.88, - "XPQARetrieval (fr)": 45.23 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "SICKFr": 75.5, - "STS22 (fr)": 82.8, - "STSBenchmarkMultilingualSTS (fr)": 76.48 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0", - "SummEvalFr": 31.4 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "Cohere-embed-multilingual-light-v3.0" - } - ] - } - }, - "e5-mistral-7b-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "e5-mistral-7b-instruct", - "Tatoeba (rus-Cyrl_eng-Latn)": 93.75 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "e5-mistral-7b-instruct", - "AmazonReviewsClassification (fr)": 36.71, - "GeoreviewClassification (rus-Cyrl)": 50.25, - "HeadlineClassification (rus-Cyrl)": 85.68, - "InappropriatenessClassification (rus-Cyrl)": 67.19, - "KinopoiskClassification (rus-Cyrl)": 65.49, - "MTOPDomainClassification (fr)": 74.8, - "MTOPIntentClassification (fr)": 53.97, - 
"MasakhaNEWSClassification (fra)": 80.59, - "MassiveIntentClassification (rus-Cyrl)": 76.08, - "MassiveIntentClassification (fr)": 46.39, - "MassiveScenarioClassification (rus-Cyrl)": 79.61, - "MassiveScenarioClassification (fr)": 53.86, - "RuReviewsClassification (rus-Cyrl)": 67.68, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 64.59, - "RuSciBenchOECDClassification (rus-Cyrl)": 51.13 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "e5-mistral-7b-instruct", - "AlloProfClusteringP2P": 61.06, - "AlloProfClusteringS2S": 28.12, - "GeoreviewClusteringP2P (rus-Cyrl)": 65.68, - "HALClusteringS2S": 19.69, - "MLSUMClusteringP2P": 45.59, - "MLSUMClusteringS2S": 32.0, - "MasakhaNEWSClusteringP2P (fra)": 52.47, - "MasakhaNEWSClusteringS2S (fra)": 49.2, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 61.55, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 52.72 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "e5-mistral-7b-instruct", - "OpusparcusPC (rus-Cyrl)": 91.44, - "OpusparcusPC (fr)": 88.5, - "PawsXPairClassification (fr)": 63.65, - "TERRa (rus-Cyrl)": 59.38 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "e5-mistral-7b-instruct", - "AlloprofReranking": 47.36, - "RuBQReranking (rus-Cyrl)": 74.61, - "SyntecReranking": 77.05 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "e5-mistral-7b-instruct", - "AILACasedocs": 38.76, - "AILAStatutes": 38.07, - "ARCChallenge": 17.81, - "AlloprofRetrieval": 16.46, - "AlphaNLI": 26.12, - "BSARDRetrieval": 0.0, - "BrightRetrieval (sustainable_living)": 18.51, - "BrightRetrieval (economics)": 15.49, - "BrightRetrieval (theoremqa_theorems)": 23.78, - "BrightRetrieval (aops)": 7.1, - "BrightRetrieval (theoremqa_questions)": 23.94, - "BrightRetrieval (stackoverflow)": 9.83, - "BrightRetrieval (psychology)": 15.79, - "BrightRetrieval (pony)": 4.81, - "BrightRetrieval (leetcode)": 28.72, - "BrightRetrieval (biology)": 18.84, - "BrightRetrieval (earth_science)": 25.96, - "BrightRetrieval (robotics)": 16.37, - "GerDaLIRSmall": 37.18, - "HellaSwag": 34.85, - "LEMBNarrativeQARetrieval": 44.62, - "LEMBNeedleRetrieval": 48.25, - "LEMBPasskeyRetrieval": 71.0, - "LEMBQMSumRetrieval": 43.63, - "LEMBSummScreenFDRetrieval": 96.82, - "LEMBWikimQARetrieval": 82.11, - "LeCaRDv2": 68.56, - "LegalBenchConsumerContractsQA": 75.46, - "LegalBenchCorporateLobbying": 94.01, - "LegalQuAD": 59.64, - "LegalSummarization": 66.51, - "MintakaRetrieval (fr)": 3.57, - "PIQA": 39.37, - "Quail": 7.01, - "RARbCode": 78.46, - "RARbMath": 72.16, - "RiaNewsRetrieval (rus-Cyrl)": 81.94, - "RuBQRetrieval (rus-Cyrl)": 73.98, - "SIQA": 5.42, - "SpartQA": 9.92, - "SyntecRetrieval": 55.9, - "TempReasonL1": 3.31, - "TempReasonL2Fact": 36.9, - "TempReasonL2Pure": 9.18, - "TempReasonL3Fact": 30.18, - "TempReasonL3Pure": 14.31, - "WinoGrande": 41.21, - "XPQARetrieval (fr)": 41.29 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "e5-mistral-7b-instruct", - "RUParaPhraserSTS (rus-Cyrl)": 76.17, - "RuSTSBenchmarkSTS (rus-Cyrl)": 84.13, - "SICKFr": 64.39, - "STS22 (fr)": 69.82, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 84.25, - "STSBenchmarkMultilingualSTS (fr)": 61.87 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "e5-mistral-7b-instruct", - "SummEvalFr": 32.22 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "e5-mistral-7b-instruct", - "Core17InstructionRetrieval": 0.09, - "News21InstructionRetrieval": -0.86, - "Robust04InstructionRetrieval": -9.59 - } - ] - } - }, - "e5-large": { - "BitextMining": { - "f1": [ - { - 
"Model": "e5-large", - "BornholmBitextMining": 40.15 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "e5-large", - "AngryTweetsClassification": 46.14, - "DKHateClassification": 58.72, - "DanishPoliticalCommentsClassification": 28.67, - "LccSentimentClassification": 42.13, - "MassiveIntentClassification (da)": 42.29, - "MassiveIntentClassification (nb)": 40.63, - "MassiveIntentClassification (sv)": 40.69, - "MassiveScenarioClassification (da)": 52.95, - "MassiveScenarioClassification (nb)": 51.91, - "MassiveScenarioClassification (sv)": 50.97, - "NoRecClassification": 41.83, - "NordicLangClassification": 58.3, - "NorwegianParliament": 57.26, - "ScalaDaClassification": 49.9, - "ScalaNbClassification": 50.13 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "e5-large" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "e5-large" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "e5-large" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "e5-large" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "e5-large" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "e5-large" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "e5-large" - } - ] - } - }, - "distilrubert-small-cased-conversational": { - "BitextMining": { - "f1": [ - { - "Model": "distilrubert-small-cased-conversational", - "Tatoeba (rus-Cyrl_eng-Latn)": 24.16 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "distilrubert-small-cased-conversational", - "GeoreviewClassification (rus-Cyrl)": 38.95, - "HeadlineClassification (rus-Cyrl)": 75.59, - "InappropriatenessClassification (rus-Cyrl)": 60.68, - "KinopoiskClassification (rus-Cyrl)": 49.67, - "MassiveIntentClassification (rus-Cyrl)": 63.12, - "MassiveScenarioClassification (rus-Cyrl)": 68.08, - "RuReviewsClassification (rus-Cyrl)": 54.05, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.53, - "RuSciBenchOECDClassification (rus-Cyrl)": 37.65 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "distilrubert-small-cased-conversational", - "GeoreviewClusteringP2P (rus-Cyrl)": 43.26, - "MLSUMClusteringP2P (rus-Cyrl)": 50.08, - "MLSUMClusteringS2S (rus-Cyrl)": 51.12, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 37.84, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 34.12 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "distilrubert-small-cased-conversational", - "OpusparcusPC (rus-Cyrl)": 84.35, - "TERRa (rus-Cyrl)": 52.48 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "distilrubert-small-cased-conversational", - "RuBQReranking (rus-Cyrl)": 42.58 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "distilrubert-small-cased-conversational", - "RiaNewsRetrieval (rus-Cyrl)": 4.14, - "RuBQRetrieval (rus-Cyrl)": 10.6 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "distilrubert-small-cased-conversational", - "RUParaPhraserSTS (rus-Cyrl)": 55.01, - "RuSTSBenchmarkSTS (rus-Cyrl)": 61.72, - "STS22 (rus-Cyrl)": 51.87, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 61.6 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "distilrubert-small-cased-conversational" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "distilrubert-small-cased-conversational" - } - ] - } - }, - "gtr-t5-large": { - "BitextMining": { - "f1": [ - { - "Model": "gtr-t5-large" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gtr-t5-large", - "AmazonCounterfactualClassification (de)": 59.38, - 
"AmazonCounterfactualClassification (en)": 70.03, - "AmazonCounterfactualClassification (en-ext)": 69.86, - "AmazonCounterfactualClassification (ja)": 45.87, - "AmazonPolarityClassification": 73.92, - "AmazonReviewsClassification (de)": 33.06, - "AmazonReviewsClassification (en)": 37.21, - "AmazonReviewsClassification (es)": 34.0, - "AmazonReviewsClassification (fr)": 33.48, - "AmazonReviewsClassification (ja)": 21.78, - "AmazonReviewsClassification (zh)": 21.83, - "Banking77Classification": 81.21, - "EmotionClassification": 46.33, - "ImdbClassification": 70.86, - "MTOPDomainClassification (de)": 81.91, - "MTOPDomainClassification (en)": 94.01, - "MTOPDomainClassification (es)": 84.7, - "MTOPDomainClassification (fr)": 82.48, - "MTOPDomainClassification (hi)": 22.11, - "MTOPDomainClassification (th)": 16.36, - "MTOPIntentClassification (de)": 52.13, - "MTOPIntentClassification (en)": 63.86, - "MTOPIntentClassification (es)": 52.62, - "MTOPIntentClassification (fr)": 46.39, - "MTOPIntentClassification (hi)": 3.9, - "MTOPIntentClassification (th)": 5.38, - "MassiveIntentClassification (af)": 41.02, - "MassiveIntentClassification (am)": 2.34, - "MassiveIntentClassification (ar)": 4.87, - "MassiveIntentClassification (az)": 34.92, - "MassiveIntentClassification (bn)": 2.52, - "MassiveIntentClassification (cy)": 35.87, - "MassiveIntentClassification (da)": 45.3, - "MassiveIntentClassification (de)": 51.48, - "MassiveIntentClassification (el)": 10.0, - "MassiveIntentClassification (en)": 70.06, - "MassiveIntentClassification (es)": 53.3, - "MassiveIntentClassification (fa)": 3.59, - "MassiveIntentClassification (fi)": 37.35, - "MassiveIntentClassification (fr)": 54.83, - "MassiveIntentClassification (he)": 2.52, - "MassiveIntentClassification (hi)": 2.88, - "MassiveIntentClassification (hu)": 33.52, - "MassiveIntentClassification (hy)": 3.13, - "MassiveIntentClassification (id)": 40.11, - "MassiveIntentClassification (is)": 34.77, - "MassiveIntentClassification (it)": 51.21, - "MassiveIntentClassification (ja)": 4.75, - "MassiveIntentClassification (jv)": 35.6, - "MassiveIntentClassification (ka)": 2.71, - "MassiveIntentClassification (km)": 5.48, - "MassiveIntentClassification (kn)": 2.44, - "MassiveIntentClassification (ko)": 2.59, - "MassiveIntentClassification (lv)": 38.15, - "MassiveIntentClassification (ml)": 2.67, - "MassiveIntentClassification (mn)": 18.47, - "MassiveIntentClassification (ms)": 35.58, - "MassiveIntentClassification (my)": 4.35, - "MassiveIntentClassification (nb)": 43.78, - "MassiveIntentClassification (nl)": 45.96, - "MassiveIntentClassification (pl)": 39.08, - "MassiveIntentClassification (pt)": 52.27, - "MassiveIntentClassification (ro)": 46.39, - "MassiveIntentClassification (ru)": 16.82, - "MassiveIntentClassification (sl)": 37.3, - "MassiveIntentClassification (sq)": 41.73, - "MassiveIntentClassification (sv)": 43.51, - "MassiveIntentClassification (sw)": 35.97, - "MassiveIntentClassification (ta)": 1.52, - "MassiveIntentClassification (te)": 2.57, - "MassiveIntentClassification (th)": 3.94, - "MassiveIntentClassification (tl)": 41.03, - "MassiveIntentClassification (tr)": 33.75, - "MassiveIntentClassification (ur)": 2.57, - "MassiveIntentClassification (vi)": 25.23, - "MassiveIntentClassification (zh-CN)": 2.41, - "MassiveIntentClassification (zh-TW)": 4.64, - "MassiveScenarioClassification (af)": 51.48, - "MassiveScenarioClassification (am)": 7.74, - "MassiveScenarioClassification (ar)": 12.03, - "MassiveScenarioClassification (az)": 41.77, - 
"MassiveScenarioClassification (bn)": 8.07, - "MassiveScenarioClassification (cy)": 43.67, - "MassiveScenarioClassification (da)": 54.88, - "MassiveScenarioClassification (de)": 63.63, - "MassiveScenarioClassification (el)": 16.83, - "MassiveScenarioClassification (en)": 75.49, - "MassiveScenarioClassification (es)": 61.48, - "MassiveScenarioClassification (fa)": 6.48, - "MassiveScenarioClassification (fi)": 43.54, - "MassiveScenarioClassification (fr)": 64.06, - "MassiveScenarioClassification (he)": 8.03, - "MassiveScenarioClassification (hi)": 7.5, - "MassiveScenarioClassification (hu)": 42.59, - "MassiveScenarioClassification (hy)": 9.22, - "MassiveScenarioClassification (id)": 48.67, - "MassiveScenarioClassification (is)": 43.87, - "MassiveScenarioClassification (it)": 59.83, - "MassiveScenarioClassification (ja)": 5.62, - "MassiveScenarioClassification (jv)": 42.18, - "MassiveScenarioClassification (ka)": 7.52, - "MassiveScenarioClassification (km)": 9.55, - "MassiveScenarioClassification (kn)": 8.34, - "MassiveScenarioClassification (ko)": 6.11, - "MassiveScenarioClassification (lv)": 43.35, - "MassiveScenarioClassification (ml)": 7.28, - "MassiveScenarioClassification (mn)": 23.94, - "MassiveScenarioClassification (ms)": 45.18, - "MassiveScenarioClassification (my)": 9.33, - "MassiveScenarioClassification (nb)": 52.71, - "MassiveScenarioClassification (nl)": 57.02, - "MassiveScenarioClassification (pl)": 46.79, - "MassiveScenarioClassification (pt)": 59.45, - "MassiveScenarioClassification (ro)": 56.8, - "MassiveScenarioClassification (ru)": 25.85, - "MassiveScenarioClassification (sl)": 42.51, - "MassiveScenarioClassification (sq)": 50.41, - "MassiveScenarioClassification (sv)": 54.16, - "MassiveScenarioClassification (sw)": 43.02, - "MassiveScenarioClassification (ta)": 7.21, - "MassiveScenarioClassification (te)": 6.9, - "MassiveScenarioClassification (th)": 8.7, - "MassiveScenarioClassification (tl)": 51.76, - "MassiveScenarioClassification (tr)": 42.54, - "MassiveScenarioClassification (ur)": 9.32, - "MassiveScenarioClassification (vi)": 31.51, - "MassiveScenarioClassification (zh-CN)": 3.84, - "MassiveScenarioClassification (zh-TW)": 8.16, - "ToxicConversationsClassification": 68.65, - "TweetSentimentExtractionClassification": 54.09 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gtr-t5-large", - "ArxivClusteringP2P": 37.5, - "ArxivClusteringS2S": 30.55, - "BiorxivClusteringP2P": 29.59, - "BiorxivClusteringS2S": 25.72, - "MedrxivClusteringP2P": 28.72, - "MedrxivClusteringS2S": 27.39, - "RedditClustering": 61.69, - "RedditClusteringP2P": 61.67, - "StackExchangeClustering": 69.93, - "StackExchangeClusteringP2P": 33.21, - "TwentyNewsgroupsClustering": 51.64 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "gtr-t5-large", - "SprintDuplicateQuestions": 95.05, - "TwitterSemEval2015": 76.03, - "TwitterURLCorpus": 84.89 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "gtr-t5-large", - "AskUbuntuDupQuestions": 61.64, - "MindSmallReranking": 31.84, - "SciDocsRR": 76.39, - "StackOverflowDupQuestions": 51.58 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "gtr-t5-large", - "ArguAna": 52.09, - "CQADupstackRetrieval": 36.62, - "ClimateFEVER": 26.9, - "DBPedia": 39.55, - "FEVER": 72.66, - "FiQA2018": 42.79, - "HotpotQA": 57.85, - "MSMARCO": 42.73, - "NFCorpus": 32.63, - "NQ": 55.09, - "QuoraRetrieval": 88.47, - "SCIDOCS": 15.51, - "SciFact": 63.42, - "TRECCOVID": 56.68, - "Touche2020": 28.29 - } - ] - }, - "STS": { - "spearman": [ - { - 
"Model": "gtr-t5-large", - "BIOSSES": 84.86, - "SICK-R": 73.39, - "STS12": 70.33, - "STS13": 82.19, - "STS14": 77.16, - "STS15": 86.31, - "STS16": 81.85, - "STS17 (ar-ar)": 10.19, - "STS17 (en-ar)": -5.77, - "STS17 (en-de)": 67.43, - "STS17 (en-en)": 83.93, - "STS17 (en-tr)": 8.75, - "STS17 (es-en)": 54.96, - "STS17 (es-es)": 82.74, - "STS17 (fr-en)": 60.5, - "STS17 (it-en)": 46.26, - "STS17 (ko-ko)": 8.96, - "STS17 (nl-en)": 47.48, - "STS22 (ar)": 34.97, - "STS22 (de)": 51.7, - "STS22 (de-en)": 48.76, - "STS22 (de-fr)": 57.5, - "STS22 (de-pl)": 32.76, - "STS22 (en)": 64.3, - "STS22 (es)": 57.49, - "STS22 (es-en)": 67.76, - "STS22 (es-it)": 57.18, - "STS22 (fr)": 78.7, - "STS22 (fr-pl)": 61.98, - "STS22 (it)": 67.67, - "STS22 (pl)": 30.68, - "STS22 (pl-en)": 54.17, - "STS22 (ru)": 15.36, - "STS22 (tr)": 58.12, - "STS22 (zh)": 27.32, - "STS22 (zh-en)": 29.42, - "STSBenchmark": 77.6 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "gtr-t5-large", - "SummEval": 29.5 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "gtr-t5-large" - } - ] - } - }, - "allenai-specter": { - "BitextMining": { - "f1": [ - { - "Model": "allenai-specter" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "allenai-specter", - "AmazonCounterfactualClassification (de)": 54.46, - "AmazonCounterfactualClassification (en)": 58.7, - "AmazonCounterfactualClassification (en-ext)": 59.28, - "AmazonCounterfactualClassification (ja)": 43.87, - "AmazonPolarityClassification": 57.77, - "AmazonReviewsClassification (de)": 24.08, - "AmazonReviewsClassification (en)": 26.26, - "AmazonReviewsClassification (es)": 23.88, - "AmazonReviewsClassification (fr)": 23.31, - "AmazonReviewsClassification (ja)": 20.25, - "AmazonReviewsClassification (zh)": 20.49, - "Banking77Classification": 66.66, - "EmotionClassification": 24.82, - "ImdbClassification": 56.35, - "MTOPDomainClassification (de)": 48.55, - "MTOPDomainClassification (en)": 74.53, - "MTOPDomainClassification (es)": 58.39, - "MTOPDomainClassification (fr)": 54.61, - "MTOPDomainClassification (hi)": 21.22, - "MTOPDomainClassification (th)": 14.98, - "MTOPIntentClassification (de)": 35.55, - "MTOPIntentClassification (en)": 50.05, - "MTOPIntentClassification (es)": 36.72, - "MTOPIntentClassification (fr)": 34.71, - "MTOPIntentClassification (hi)": 4.44, - "MTOPIntentClassification (th)": 4.67, - "MassiveIntentClassification (af)": 33.68, - "MassiveIntentClassification (am)": 2.94, - "MassiveIntentClassification (ar)": 10.04, - "MassiveIntentClassification (az)": 30.74, - "MassiveIntentClassification (bn)": 3.02, - "MassiveIntentClassification (cy)": 33.94, - "MassiveIntentClassification (da)": 38.47, - "MassiveIntentClassification (de)": 36.06, - "MassiveIntentClassification (el)": 27.7, - "MassiveIntentClassification (en)": 51.73, - "MassiveIntentClassification (es)": 35.6, - "MassiveIntentClassification (fa)": 17.97, - "MassiveIntentClassification (fi)": 35.53, - "MassiveIntentClassification (fr)": 38.41, - "MassiveIntentClassification (he)": 2.69, - "MassiveIntentClassification (hi)": 3.43, - "MassiveIntentClassification (hu)": 34.05, - "MassiveIntentClassification (hy)": 3.11, - "MassiveIntentClassification (id)": 40.02, - "MassiveIntentClassification (is)": 32.63, - "MassiveIntentClassification (it)": 39.28, - "MassiveIntentClassification (ja)": 4.95, - "MassiveIntentClassification (jv)": 34.95, - "MassiveIntentClassification (ka)": 2.57, - "MassiveIntentClassification (km)": 4.73, - "MassiveIntentClassification (kn)": 3.54, - 
"MassiveIntentClassification (ko)": 2.68, - "MassiveIntentClassification (lv)": 37.91, - "MassiveIntentClassification (ml)": 2.88, - "MassiveIntentClassification (mn)": 16.94, - "MassiveIntentClassification (ms)": 36.6, - "MassiveIntentClassification (my)": 3.96, - "MassiveIntentClassification (nb)": 34.75, - "MassiveIntentClassification (nl)": 33.95, - "MassiveIntentClassification (pl)": 35.77, - "MassiveIntentClassification (pt)": 43.05, - "MassiveIntentClassification (ro)": 36.2, - "MassiveIntentClassification (ru)": 25.3, - "MassiveIntentClassification (sl)": 35.9, - "MassiveIntentClassification (sq)": 36.6, - "MassiveIntentClassification (sv)": 36.0, - "MassiveIntentClassification (sw)": 34.81, - "MassiveIntentClassification (ta)": 3.11, - "MassiveIntentClassification (te)": 2.53, - "MassiveIntentClassification (th)": 4.38, - "MassiveIntentClassification (tl)": 35.51, - "MassiveIntentClassification (tr)": 32.02, - "MassiveIntentClassification (ur)": 9.61, - "MassiveIntentClassification (vi)": 37.07, - "MassiveIntentClassification (zh-CN)": 2.81, - "MassiveIntentClassification (zh-TW)": 4.79, - "MassiveScenarioClassification (af)": 36.17, - "MassiveScenarioClassification (am)": 7.64, - "MassiveScenarioClassification (ar)": 15.26, - "MassiveScenarioClassification (az)": 30.73, - "MassiveScenarioClassification (bn)": 7.15, - "MassiveScenarioClassification (cy)": 34.73, - "MassiveScenarioClassification (da)": 39.93, - "MassiveScenarioClassification (de)": 38.62, - "MassiveScenarioClassification (el)": 27.18, - "MassiveScenarioClassification (en)": 58.58, - "MassiveScenarioClassification (es)": 39.44, - "MassiveScenarioClassification (fa)": 21.43, - "MassiveScenarioClassification (fi)": 33.21, - "MassiveScenarioClassification (fr)": 40.26, - "MassiveScenarioClassification (he)": 7.42, - "MassiveScenarioClassification (hi)": 8.06, - "MassiveScenarioClassification (hu)": 34.54, - "MassiveScenarioClassification (hy)": 8.61, - "MassiveScenarioClassification (id)": 40.04, - "MassiveScenarioClassification (is)": 33.57, - "MassiveScenarioClassification (it)": 40.1, - "MassiveScenarioClassification (ja)": 9.96, - "MassiveScenarioClassification (jv)": 36.11, - "MassiveScenarioClassification (ka)": 7.13, - "MassiveScenarioClassification (km)": 9.66, - "MassiveScenarioClassification (kn)": 7.55, - "MassiveScenarioClassification (ko)": 7.27, - "MassiveScenarioClassification (lv)": 37.03, - "MassiveScenarioClassification (ml)": 7.22, - "MassiveScenarioClassification (mn)": 21.53, - "MassiveScenarioClassification (ms)": 37.57, - "MassiveScenarioClassification (my)": 9.54, - "MassiveScenarioClassification (nb)": 35.71, - "MassiveScenarioClassification (nl)": 34.62, - "MassiveScenarioClassification (pl)": 36.87, - "MassiveScenarioClassification (pt)": 44.68, - "MassiveScenarioClassification (ro)": 37.29, - "MassiveScenarioClassification (ru)": 28.16, - "MassiveScenarioClassification (sl)": 37.95, - "MassiveScenarioClassification (sq)": 37.82, - "MassiveScenarioClassification (sv)": 35.35, - "MassiveScenarioClassification (sw)": 35.37, - "MassiveScenarioClassification (ta)": 7.19, - "MassiveScenarioClassification (te)": 7.29, - "MassiveScenarioClassification (th)": 9.47, - "MassiveScenarioClassification (tl)": 37.31, - "MassiveScenarioClassification (tr)": 34.57, - "MassiveScenarioClassification (ur)": 16.17, - "MassiveScenarioClassification (vi)": 35.91, - "MassiveScenarioClassification (zh-CN)": 9.19, - "MassiveScenarioClassification (zh-TW)": 10.19, - "ToxicConversationsClassification": 57.44, - 
"TweetSentimentExtractionClassification": 45.52 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "allenai-specter", - "ArxivClusteringP2P": 44.75, - "ArxivClusteringS2S": 35.27, - "BiorxivClusteringP2P": 39.52, - "BiorxivClusteringS2S": 34.53, - "MedrxivClusteringP2P": 35.04, - "MedrxivClusteringS2S": 31.66, - "RedditClustering": 24.13, - "RedditClusteringP2P": 35.06, - "StackExchangeClustering": 39.01, - "StackExchangeClusteringP2P": 31.46, - "TwentyNewsgroupsClustering": 24.22 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "allenai-specter", - "SprintDuplicateQuestions": 71.63, - "TwitterSemEval2015": 43.25, - "TwitterURLCorpus": 69.22 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "allenai-specter", - "AskUbuntuDupQuestions": 50.07, - "MindSmallReranking": 24.8, - "SciDocsRR": 81.31, - "StackOverflowDupQuestions": 36.22 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "allenai-specter", - "ArguAna": 32.67, - "CQADupstackRetrieval": 14.6, - "ClimateFEVER": 6.86, - "DBPedia": 4.14, - "FEVER": 5.45, - "FiQA2018": 5.64, - "HotpotQA": 5.46, - "MSMARCO": 5.59, - "NFCorpus": 0.85, - "NQ": 5.99, - "QuoraRetrieval": 64.65, - "SCIDOCS": 0.0, - "SciFact": 47.88, - "TRECCOVID": 29.91, - "Touche2020": 8.46 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "allenai-specter", - "BIOSSES": 64.95, - "SICK-R": 56.39, - "STS12": 62.49, - "STS13": 58.7, - "STS14": 54.87, - "STS15": 62.54, - "STS16": 64.27, - "STS17 (ar-ar)": 27.14, - "STS17 (en-ar)": 6.9, - "STS17 (en-de)": 11.59, - "STS17 (en-en)": 69.63, - "STS17 (en-tr)": 6.46, - "STS17 (es-en)": 10.86, - "STS17 (es-es)": 55.45, - "STS17 (fr-en)": 16.02, - "STS17 (it-en)": 19.87, - "STS17 (ko-ko)": 8.08, - "STS17 (nl-en)": 24.92, - "STS22 (ar)": 19.57, - "STS22 (de)": 17.31, - "STS22 (de-en)": 26.03, - "STS22 (de-fr)": 10.26, - "STS22 (de-pl)": 16.94, - "STS22 (en)": 55.06, - "STS22 (es)": 48.89, - "STS22 (es-en)": 51.79, - "STS22 (es-it)": 25.24, - "STS22 (fr)": 53.92, - "STS22 (fr-pl)": 39.44, - "STS22 (it)": 39.43, - "STS22 (pl)": 13.56, - "STS22 (pl-en)": 25.36, - "STS22 (ru)": 1.11, - "STS22 (tr)": 31.73, - "STS22 (zh)": 16.35, - "STS22 (zh-en)": 8.44, - "STSBenchmark": 61.26 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "allenai-specter", - "SummEval": 27.66 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "allenai-specter" - } - ] - } - }, - "bert-base-multilingual-uncased": { - "BitextMining": { - "f1": [ - { - "Model": "bert-base-multilingual-uncased" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bert-base-multilingual-uncased", - "AmazonReviewsClassification (fr)": 29.02, - "MTOPDomainClassification (fr)": 64.49, - "MTOPIntentClassification (fr)": 39.4, - "MasakhaNEWSClassification (fra)": 75.69, - "MassiveIntentClassification (fr)": 38.01, - "MassiveScenarioClassification (fr)": 43.63 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bert-base-multilingual-uncased", - "AlloProfClusteringP2P": 60.66, - "AlloProfClusteringS2S": 35.05, - "HALClusteringS2S": 20.9, - "MLSUMClusteringP2P": 43.5, - "MLSUMClusteringS2S": 30.99, - "MasakhaNEWSClusteringP2P (fra)": 49.71, - "MasakhaNEWSClusteringS2S (fra)": 42.23 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bert-base-multilingual-uncased", - "OpusparcusPC (fr)": 87.43, - "PawsXPairClassification (fr)": 53.22 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bert-base-multilingual-uncased", - "AlloprofReranking": 38.85, - "SyntecReranking": 66.4 
- } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bert-base-multilingual-uncased", - "AlloprofRetrieval": 5.51, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 2.87, - "SyntecRetrieval": 34.95, - "XPQARetrieval (fr)": 26.12 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bert-base-multilingual-uncased", - "SICKFr": 58.26, - "STS22 (fr)": 56.47, - "STSBenchmarkMultilingualSTS (fr)": 54.97 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bert-base-multilingual-uncased", - "SummEvalFr": 30.72 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bert-base-multilingual-uncased" - } - ] - } - }, - "bge-m3": { - "BitextMining": { - "f1": [ - { - "Model": "bge-m3", - "Tatoeba (rus-Cyrl_eng-Latn)": 93.42 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-m3", - "GeoreviewClassification (rus-Cyrl)": 48.27, - "HeadlineClassification (rus-Cyrl)": 70.32, - "InappropriatenessClassification (rus-Cyrl)": 59.87, - "KinopoiskClassification (rus-Cyrl)": 58.23, - "MassiveIntentClassification (rus-Cyrl)": 68.75, - "MassiveScenarioClassification (rus-Cyrl)": 73.42, - "RuReviewsClassification (rus-Cyrl)": 66.91, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.81, - "RuSciBenchOECDClassification (rus-Cyrl)": 42.57 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-m3", - "GeoreviewClusteringP2P (rus-Cyrl)": 63.75, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.57, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 43.21 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-m3", - "OpusparcusPC (rus-Cyrl)": 89.64, - "TERRa (rus-Cyrl)": 60.6 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-m3", - "RuBQReranking (rus-Cyrl)": 74.02 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-m3", - "ARCChallenge": 9.02, - "AlphaNLI": 24.73, - "HellaSwag": 25.67, - "LEMBNarrativeQARetrieval": 45.76, - "LEMBNeedleRetrieval": 40.25, - "LEMBPasskeyRetrieval": 46.0, - "LEMBQMSumRetrieval": 35.54, - "LEMBSummScreenFDRetrieval": 94.09, - "LEMBWikimQARetrieval": 77.73, - "PIQA": 22.93, - "Quail": 7.51, - "RARbCode": 38.8, - "RARbMath": 69.19, - "RiaNewsRetrieval (rus-Cyrl)": 82.98, - "RuBQRetrieval (rus-Cyrl)": 71.21, - "SIQA": 4.89, - "SpartQA": 7.49, - "TempReasonL1": 0.99, - "TempReasonL2Fact": 33.23, - "TempReasonL2Pure": 0.68, - "TempReasonL3Fact": 30.05, - "TempReasonL3Pure": 5.28, - "WinoGrande": 41.72 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-m3", - "RUParaPhraserSTS (rus-Cyrl)": 74.9, - "RuSTSBenchmarkSTS (rus-Cyrl)": 79.87, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.27 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bge-m3" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-m3" - } - ] - } - }, - "sentence-camembert-base": { - "BitextMining": { - "f1": [ - { - "Model": "sentence-camembert-base" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "sentence-camembert-base", - "AmazonReviewsClassification (fr)": 36.03, - "MTOPDomainClassification (fr)": 77.1, - "MTOPIntentClassification (fr)": 43.44, - "MasakhaNEWSClassification (fra)": 70.36, - "MassiveIntentClassification (fr)": 51.59, - "MassiveScenarioClassification (fr)": 61.28 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "sentence-camembert-base", - "AlloProfClusteringP2P": 59.09, - "AlloProfClusteringS2S": 38.92, - "HALClusteringS2S": 20.22, - "MLSUMClusteringP2P": 35.98, - "MLSUMClusteringS2S": 27.05, - "MasakhaNEWSClusteringP2P (fra)": 36.03, 
- "MasakhaNEWSClusteringS2S (fra)": 30.77 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "sentence-camembert-base", - "OpusparcusPC (fr)": 92.05, - "PawsXPairClassification (fr)": 57.44 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "sentence-camembert-base", - "AlloprofReranking": 48.68, - "SyntecReranking": 79.75 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "sentence-camembert-base", - "AlloprofRetrieval": 21.94, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 13.36, - "SyntecRetrieval": 68.62, - "XPQARetrieval (fr)": 57.92 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "sentence-camembert-base", - "SICKFr": 74.18, - "STS22 (fr)": 77.54, - "STSBenchmarkMultilingualSTS (fr)": 81.64 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "sentence-camembert-base", - "SummEvalFr": 28.77 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "sentence-camembert-base" - } - ] - } - }, - "all-mpnet-base-v2": { - "BitextMining": { - "f1": [ - { - "Model": "all-mpnet-base-v2", - "BornholmBitextMining (dan-Latn)": 27.44, - "Tatoeba (pol-Latn_eng-Latn)": 4.09, - "Tatoeba (ita-Latn_eng-Latn)": 11.1, - "Tatoeba (cat-Latn_eng-Latn)": 9.44, - "Tatoeba (aze-Latn_eng-Latn)": 1.49, - "Tatoeba (eus-Latn_eng-Latn)": 3.94, - "Tatoeba (epo-Latn_eng-Latn)": 7.15, - "Tatoeba (lit-Latn_eng-Latn)": 1.02, - "Tatoeba (ast-Latn_eng-Latn)": 9.78, - "Tatoeba (bul-Cyrl_eng-Latn)": 0.35, - "Tatoeba (ceb-Latn_eng-Latn)": 4.41, - "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0, - "Tatoeba (tzl-Latn_eng-Latn)": 3.55, - "Tatoeba (zsm-Latn_eng-Latn)": 4.75, - "Tatoeba (mhr-Cyrl_eng-Latn)": 0.17, - "Tatoeba (pam-Latn_eng-Latn)": 4.32, - "Tatoeba (amh-Ethi_eng-Latn)": 0.0, - "Tatoeba (slv-Latn_eng-Latn)": 3.73, - "Tatoeba (lvs-Latn_eng-Latn)": 2.98, - "Tatoeba (sqi-Latn_eng-Latn)": 3.45, - "Tatoeba (orv-Cyrl_eng-Latn)": 0.0, - "Tatoeba (vie-Latn_eng-Latn)": 4.96, - "Tatoeba (pes-Arab_eng-Latn)": 0.2, - "Tatoeba (por-Latn_eng-Latn)": 10.48, - "Tatoeba (dtp-Latn_eng-Latn)": 3.54, - "Tatoeba (yid-Hebr_eng-Latn)": 0.08, - "Tatoeba (isl-Latn_eng-Latn)": 3.86, - "Tatoeba (cha-Latn_eng-Latn)": 12.2, - "Tatoeba (ron-Latn_eng-Latn)": 7.34, - "Tatoeba (hye-Armn_eng-Latn)": 0.14, - "Tatoeba (mar-Deva_eng-Latn)": 0.11, - "Tatoeba (hin-Deva_eng-Latn)": 0.02, - "Tatoeba (kor-Hang_eng-Latn)": 0.32, - "Tatoeba (srp-Cyrl_eng-Latn)": 1.89, - "Tatoeba (csb-Latn_eng-Latn)": 4.19, - "Tatoeba (jpn-Jpan_eng-Latn)": 1.71, - "Tatoeba (ber-Tfng_eng-Latn)": 4.56, - "Tatoeba (wuu-Hans_eng-Latn)": 0.91, - "Tatoeba (jav-Latn_eng-Latn)": 3.17, - "Tatoeba (nob-Latn_eng-Latn)": 4.37, - "Tatoeba (bre-Latn_eng-Latn)": 3.65, - "Tatoeba (kzj-Latn_eng-Latn)": 3.62, - "Tatoeba (urd-Arab_eng-Latn)": 0.0, - "Tatoeba (ces-Latn_eng-Latn)": 3.56, - "Tatoeba (cbk-Latn_eng-Latn)": 9.33, - "Tatoeba (gla-Latn_eng-Latn)": 2.04, - "Tatoeba (war-Latn_eng-Latn)": 5.14, - "Tatoeba (swh-Latn_eng-Latn)": 6.01, - "Tatoeba (swg-Latn_eng-Latn)": 7.86, - "Tatoeba (glg-Latn_eng-Latn)": 12.0, - "Tatoeba (fao-Latn_eng-Latn)": 7.08, - "Tatoeba (gsw-Latn_eng-Latn)": 10.67, - "Tatoeba (rus-Cyrl_eng-Latn)": 0.14, - "Tatoeba (kaz-Cyrl_eng-Latn)": 0.52, - "Tatoeba (gle-Latn_eng-Latn)": 2.19, - "Tatoeba (slk-Latn_eng-Latn)": 3.4, - "Tatoeba (nno-Latn_eng-Latn)": 5.75, - "Tatoeba (cor-Latn_eng-Latn)": 2.42, - "Tatoeba (nov-Latn_eng-Latn)": 16.61, - "Tatoeba (swe-Latn_eng-Latn)": 6.55, - "Tatoeba (max-Deva_eng-Latn)": 6.46, - "Tatoeba (oci-Latn_eng-Latn)": 8.57, - "Tatoeba (lfn-Latn_eng-Latn)": 6.1, - "Tatoeba 
(fra-Latn_eng-Latn)": 16.9, - "Tatoeba (ben-Beng_eng-Latn)": 0.0, - "Tatoeba (bel-Cyrl_eng-Latn)": 0.65, - "Tatoeba (lat-Latn_eng-Latn)": 5.78, - "Tatoeba (cmn-Hans_eng-Latn)": 2.22, - "Tatoeba (kat-Geor_eng-Latn)": 0.43, - "Tatoeba (bos-Latn_eng-Latn)": 4.6, - "Tatoeba (xho-Latn_eng-Latn)": 3.3, - "Tatoeba (tha-Thai_eng-Latn)": 0.0, - "Tatoeba (cym-Latn_eng-Latn)": 4.88, - "Tatoeba (deu-Latn_eng-Latn)": 11.46, - "Tatoeba (awa-Deva_eng-Latn)": 0.44, - "Tatoeba (ido-Latn_eng-Latn)": 9.84, - "Tatoeba (tat-Cyrl_eng-Latn)": 0.24, - "Tatoeba (kab-Latn_eng-Latn)": 1.31, - "Tatoeba (uzb-Latn_eng-Latn)": 1.98, - "Tatoeba (heb-Hebr_eng-Latn)": 0.28, - "Tatoeba (ara-Arab_eng-Latn)": 0.1, - "Tatoeba (fry-Latn_eng-Latn)": 12.43, - "Tatoeba (afr-Latn_eng-Latn)": 6.08, - "Tatoeba (kur-Latn_eng-Latn)": 3.65, - "Tatoeba (pms-Latn_eng-Latn)": 7.63, - "Tatoeba (ell-Grek_eng-Latn)": 0.0, - "Tatoeba (spa-Latn_eng-Latn)": 10.12, - "Tatoeba (dsb-Latn_eng-Latn)": 2.96, - "Tatoeba (uig-Arab_eng-Latn)": 0.33, - "Tatoeba (nld-Latn_eng-Latn)": 9.29, - "Tatoeba (tel-Telu_eng-Latn)": 0.73, - "Tatoeba (hrv-Latn_eng-Latn)": 3.77, - "Tatoeba (nds-Latn_eng-Latn)": 10.96, - "Tatoeba (hun-Latn_eng-Latn)": 3.23, - "Tatoeba (est-Latn_eng-Latn)": 2.35, - "Tatoeba (mal-Mlym_eng-Latn)": 0.15, - "Tatoeba (khm-Khmr_eng-Latn)": 0.28, - "Tatoeba (hsb-Latn_eng-Latn)": 3.12, - "Tatoeba (tgl-Latn_eng-Latn)": 4.06, - "Tatoeba (ang-Latn_eng-Latn)": 9.77, - "Tatoeba (tur-Latn_eng-Latn)": 3.16, - "Tatoeba (tuk-Latn_eng-Latn)": 2.23, - "Tatoeba (ile-Latn_eng-Latn)": 17.84, - "Tatoeba (mon-Cyrl_eng-Latn)": 0.81, - "Tatoeba (yue-Hant_eng-Latn)": 1.16, - "Tatoeba (ina-Latn_eng-Latn)": 22.55, - "Tatoeba (tam-Taml_eng-Latn)": 0.73, - "Tatoeba (ukr-Cyrl_eng-Latn)": 0.5, - "Tatoeba (dan-Latn_eng-Latn)": 10.01, - "Tatoeba (arq-Arab_eng-Latn)": 0.33, - "Tatoeba (arz-Arab_eng-Latn)": 0.0, - "Tatoeba (fin-Latn_eng-Latn)": 3.82, - "Tatoeba (ind-Latn_eng-Latn)": 4.88 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "all-mpnet-base-v2", - "AllegroReviews (pol-Latn)": 22.99, - "AmazonCounterfactualClassification (en-ext)": 67.5, - "AmazonCounterfactualClassification (en)": 65.03, - "AmazonCounterfactualClassification (deu-Latn)": 55.66, - "AmazonCounterfactualClassification (jpn-Jpan)": 60.69, - "AmazonPolarityClassification": 67.14, - "AmazonReviewsClassification (en)": 31.44, - "AmazonReviewsClassification (deu-Latn)": 26.05, - "AmazonReviewsClassification (spa-Latn)": 27.73, - "AmazonReviewsClassification (fra-Latn)": 28.49, - "AmazonReviewsClassification (jpn-Jpan)": 23.65, - "AmazonReviewsClassification (cmn-Hans)": 23.62, - "AngryTweetsClassification (dan-Latn)": 44.13, - "Banking77Classification": 81.7, - "CBD (pol-Latn)": 50.25, - "DanishPoliticalCommentsClassification (dan-Latn)": 28.31, - "EmotionClassification": 42.22, - "GeoreviewClassification (rus-Cyrl)": 25.93, - "HeadlineClassification (rus-Cyrl)": 28.53, - "IFlyTek (cmn-Hans)": 17.18, - "ImdbClassification": 71.17, - "InappropriatenessClassification (rus-Cyrl)": 51.82, - "JDReview (cmn-Hans)": 60.19, - "KinopoiskClassification (rus-Cyrl)": 34.18, - "LccSentimentClassification (dan-Latn)": 39.27, - "MTOPDomainClassification (en)": 91.89, - "MTOPDomainClassification (deu-Latn)": 71.86, - "MTOPDomainClassification (spa-Latn)": 71.3, - "MTOPDomainClassification (fra-Latn)": 74.88, - "MTOPDomainClassification (hin-Deva)": 39.93, - "MTOPDomainClassification (tha-Thai)": 17.54, - "MTOPIntentClassification (en)": 68.27, - "MTOPIntentClassification (deu-Latn)": 44.36, - 
"MTOPIntentClassification (spa-Latn)": 39.48, - "MTOPIntentClassification (fra-Latn)": 37.57, - "MTOPIntentClassification (hin-Deva)": 18.63, - "MTOPIntentClassification (tha-Thai)": 5.42, - "MasakhaNEWSClassification (amh-Ethi)": 36.49, - "MasakhaNEWSClassification (eng)": 79.75, - "MasakhaNEWSClassification (fra-Latn)": 77.77, - "MasakhaNEWSClassification (hau-Latn)": 59.22, - "MasakhaNEWSClassification (ibo-Latn)": 61.64, - "MasakhaNEWSClassification (lin-Latn)": 74.0, - "MasakhaNEWSClassification (lug-Latn)": 58.43, - "MasakhaNEWSClassification (orm-Ethi)": 48.15, - "MasakhaNEWSClassification (pcm-Latn)": 92.2, - "MasakhaNEWSClassification (run-Latn)": 64.72, - "MasakhaNEWSClassification (sna-Latn)": 73.69, - "MasakhaNEWSClassification (som-Latn)": 49.97, - "MasakhaNEWSClassification (swa-Latn)": 55.15, - "MasakhaNEWSClassification (tir-Ethi)": 27.46, - "MasakhaNEWSClassification (xho-Latn)": 60.98, - "MasakhaNEWSClassification (yor-Latn)": 63.33, - "MassiveIntentClassification (en)": 69.76, - "MassiveIntentClassification (jav-Latn)": 31.75, - "MassiveIntentClassification (fra-Latn)": 44.27, - "MassiveIntentClassification (msa-Latn)": 30.53, - "MassiveIntentClassification (hun-Latn)": 34.38, - "MassiveIntentClassification (pol-Latn)": 34.26, - "MassiveIntentClassification (nld-Latn)": 38.49, - "MassiveIntentClassification (tha-Thai)": 8.51, - "MassiveIntentClassification (tur-Latn)": 32.02, - "MassiveIntentClassification (tam-Taml)": 9.25, - "MassiveIntentClassification (hye-Armn)": 10.11, - "MassiveIntentClassification (khm-Khmr)": 4.74, - "MassiveIntentClassification (lav-Latn)": 35.08, - "MassiveIntentClassification (deu-Latn)": 44.54, - "MassiveIntentClassification (spa-Latn)": 39.75, - "MassiveIntentClassification (ben-Beng)": 12.35, - "MassiveIntentClassification (por-Latn)": 42.83, - "MassiveIntentClassification (ara-Arab)": 20.42, - "MassiveIntentClassification (cym-Latn)": 30.82, - "MassiveIntentClassification (dan-Latn)": 42.36, - "MassiveIntentClassification (mya-Mymr)": 4.6, - "MassiveIntentClassification (heb-Hebr)": 23.6, - "MassiveIntentClassification (kan-Knda)": 3.76, - "MassiveIntentClassification (swa-Latn)": 31.82, - "MassiveIntentClassification (fas-Arab)": 22.45, - "MassiveIntentClassification (hin-Deva)": 17.68, - "MassiveIntentClassification (kat-Geor)": 7.66, - "MassiveIntentClassification (mal-Mlym)": 2.64, - "MassiveIntentClassification (fin-Latn)": 34.58, - "MassiveIntentClassification (slv-Latn)": 34.49, - "MassiveIntentClassification (afr-Latn)": 36.49, - "MassiveIntentClassification (urd-Arab)": 12.86, - "MassiveIntentClassification (ron-Latn)": 38.07, - "MassiveIntentClassification (sqi-Latn)": 37.26, - "MassiveIntentClassification (cmo-Hant)": 22.43, - "MassiveIntentClassification (ita-Latn)": 40.29, - "MassiveIntentClassification (ind-Latn)": 36.31, - "MassiveIntentClassification (nob-Latn)": 39.3, - "MassiveIntentClassification (jpn-Jpan)": 33.13, - "MassiveIntentClassification (aze-Latn)": 28.92, - "MassiveIntentClassification (mon-Cyrl)": 19.65, - "MassiveIntentClassification (ell-Grek)": 24.52, - "MassiveIntentClassification (rus-Cyrl)": 23.98, - "MassiveIntentClassification (kor-Kore)": 13.35, - "MassiveIntentClassification (cmo-Hans)": 24.36, - "MassiveIntentClassification (isl-Latn)": 31.46, - "MassiveIntentClassification (swe-Latn)": 39.02, - "MassiveIntentClassification (tel-Telu)": 2.26, - "MassiveIntentClassification (vie-Latn)": 31.47, - "MassiveIntentClassification (tgl-Latn)": 36.33, - "MassiveIntentClassification (amh-Ethi)": 2.39, - 
"MassiveScenarioClassification (en)": 75.67, - "MassiveScenarioClassification (tur-Latn)": 39.11, - "MassiveScenarioClassification (kat-Geor)": 13.45, - "MassiveScenarioClassification (jpn-Jpan)": 40.57, - "MassiveScenarioClassification (spa-Latn)": 50.92, - "MassiveScenarioClassification (fas-Arab)": 27.8, - "MassiveScenarioClassification (hun-Latn)": 41.01, - "MassiveScenarioClassification (jav-Latn)": 40.0, - "MassiveScenarioClassification (por-Latn)": 52.06, - "MassiveScenarioClassification (sqi-Latn)": 44.67, - "MassiveScenarioClassification (lav-Latn)": 39.28, - "MassiveScenarioClassification (deu-Latn)": 54.09, - "MassiveScenarioClassification (nld-Latn)": 47.79, - "MassiveScenarioClassification (mon-Cyrl)": 25.58, - "MassiveScenarioClassification (swa-Latn)": 40.34, - "MassiveScenarioClassification (ben-Beng)": 17.49, - "MassiveScenarioClassification (cym-Latn)": 34.82, - "MassiveScenarioClassification (swe-Latn)": 44.53, - "MassiveScenarioClassification (rus-Cyrl)": 28.71, - "MassiveScenarioClassification (fra-Latn)": 54.26, - "MassiveScenarioClassification (dan-Latn)": 49.45, - "MassiveScenarioClassification (mya-Mymr)": 10.8, - "MassiveScenarioClassification (ron-Latn)": 47.86, - "MassiveScenarioClassification (cmo-Hans)": 35.33, - "MassiveScenarioClassification (hin-Deva)": 23.13, - "MassiveScenarioClassification (cmo-Hant)": 31.7, - "MassiveScenarioClassification (afr-Latn)": 43.63, - "MassiveScenarioClassification (aze-Latn)": 36.42, - "MassiveScenarioClassification (msa-Latn)": 37.28, - "MassiveScenarioClassification (ell-Grek)": 33.85, - "MassiveScenarioClassification (isl-Latn)": 39.36, - "MassiveScenarioClassification (fin-Latn)": 38.41, - "MassiveScenarioClassification (ind-Latn)": 43.05, - "MassiveScenarioClassification (pol-Latn)": 42.66, - "MassiveScenarioClassification (tam-Taml)": 14.55, - "MassiveScenarioClassification (ita-Latn)": 51.37, - "MassiveScenarioClassification (urd-Arab)": 20.0, - "MassiveScenarioClassification (kan-Knda)": 8.34, - "MassiveScenarioClassification (tel-Telu)": 7.81, - "MassiveScenarioClassification (mal-Mlym)": 7.69, - "MassiveScenarioClassification (ara-Arab)": 27.8, - "MassiveScenarioClassification (kor-Kore)": 17.28, - "MassiveScenarioClassification (vie-Latn)": 35.9, - "MassiveScenarioClassification (amh-Ethi)": 7.43, - "MassiveScenarioClassification (heb-Hebr)": 25.49, - "MassiveScenarioClassification (hye-Armn)": 16.86, - "MassiveScenarioClassification (khm-Khmr)": 9.63, - "MassiveScenarioClassification (slv-Latn)": 39.88, - "MassiveScenarioClassification (tgl-Latn)": 47.04, - "MassiveScenarioClassification (nob-Latn)": 45.75, - "MassiveScenarioClassification (tha-Thai)": 17.01, - "MultilingualSentiment (cmn-Hans)": 41.2, - "NoRecClassification (nob-Latn)": 38.34, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 50.15, - "OnlineShopping (cmn-Hans)": 56.94, - "PAC (pol-Latn)": 62.1, - "PolEmo2.0-IN (pol-Latn)": 41.63, - "PolEmo2.0-OUT (pol-Latn)": 25.0, - "RuReviewsClassification (rus-Cyrl)": 42.33, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 13.29, - "RuSciBenchOECDClassification (rus-Cyrl)": 10.62, - "TNews (cmn-Hans)": 21.05, - "ToxicConversationsClassification": 61.05, - "TweetSentimentExtractionClassification": 55.05, - "Waimai (cmn-Hans)": 63.31 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "all-mpnet-base-v2", - "ArxivClusteringP2P": 48.38, - "ArxivClusteringS2S": 39.72, - "BiorxivClusteringP2P": 39.62, - "BiorxivClusteringS2S": 35.02, - "GeoreviewClusteringP2P 
(rus-Cyrl)": 20.33, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 42.49, - "MasakhaNEWSClusteringP2P (eng)": 67.24, - "MasakhaNEWSClusteringP2P (fra-Latn)": 61.99, - "MasakhaNEWSClusteringP2P (hau-Latn)": 37.17, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 52.0, - "MasakhaNEWSClusteringP2P (lin-Latn)": 69.68, - "MasakhaNEWSClusteringP2P (lug-Latn)": 50.96, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.42, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.01, - "MasakhaNEWSClusteringP2P (run-Latn)": 57.6, - "MasakhaNEWSClusteringP2P (sna-Latn)": 54.99, - "MasakhaNEWSClusteringP2P (som-Latn)": 31.16, - "MasakhaNEWSClusteringP2P (swa-Latn)": 28.29, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 41.85, - "MasakhaNEWSClusteringP2P (xho-Latn)": 35.24, - "MasakhaNEWSClusteringP2P (yor-Latn)": 42.15, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.48, - "MasakhaNEWSClusteringS2S (eng)": 35.69, - "MasakhaNEWSClusteringS2S (fra-Latn)": 41.05, - "MasakhaNEWSClusteringS2S (hau-Latn)": 16.64, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 38.63, - "MasakhaNEWSClusteringS2S (lin-Latn)": 70.72, - "MasakhaNEWSClusteringS2S (lug-Latn)": 46.97, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 23.85, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.7, - "MasakhaNEWSClusteringS2S (run-Latn)": 52.27, - "MasakhaNEWSClusteringS2S (sna-Latn)": 47.64, - "MasakhaNEWSClusteringS2S (som-Latn)": 30.94, - "MasakhaNEWSClusteringS2S (swa-Latn)": 17.12, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.01, - "MasakhaNEWSClusteringS2S (xho-Latn)": 24.16, - "MasakhaNEWSClusteringS2S (yor-Latn)": 35.04, - "MedrxivClusteringP2P": 35.58, - "MedrxivClusteringS2S": 32.87, - "RedditClustering": 54.82, - "RedditClusteringP2P": 56.77, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 14.66, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 12.49, - "StackExchangeClustering": 53.8, - "StackExchangeClusteringP2P": 34.28, - "TwentyNewsgroupsClustering": 49.74 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "all-mpnet-base-v2", - "CDSC-E (pol-Latn)": 45.37, - "OpusparcusPC (deu-Latn)": 89.78, - "OpusparcusPC (en)": 97.75, - "OpusparcusPC (fin-Latn)": 85.82, - "OpusparcusPC (fra-Latn)": 86.61, - "OpusparcusPC (rus-Cyrl)": 79.85, - "OpusparcusPC (swe-Latn)": 81.81, - "PSC (pol-Latn)": 83.28, - "PawsXPairClassification (deu-Latn)": 52.17, - "PawsXPairClassification (en)": 61.99, - "PawsXPairClassification (spa-Latn)": 55.06, - "PawsXPairClassification (fra-Latn)": 56.42, - "PawsXPairClassification (jpn-Hira)": 47.43, - "PawsXPairClassification (kor-Hang)": 49.75, - "PawsXPairClassification (cmn-Hans)": 52.47, - "SICK-E-PL (pol-Latn)": 46.51, - "SprintDuplicateQuestions": 90.15, - "TERRa (rus-Cyrl)": 44.52, - "TwitterSemEval2015": 73.85, - "TwitterURLCorpus": 85.11 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "all-mpnet-base-v2", - "AlloprofReranking (fra-Latn)": 69.63, - "AskUbuntuDupQuestions": 65.85, - "MMarcoReranking (cmn-Hans)": 4.65, - "MindSmallReranking": 30.97, - "RuBQReranking (rus-Cyrl)": 30.96, - "SciDocsRR": 88.65, - "StackOverflowDupQuestions": 51.98, - "SyntecReranking (fra-Latn)": 66.12, - "T2Reranking (cmn-Hans)": 58.3 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "all-mpnet-base-v2", - "AILACasedocs": 22.51, - "AILAStatutes": 21.27, - "ARCChallenge": 11.8, - "AlloprofRetrieval (fra-Latn)": 34.27, - "AlphaNLI": 22.41, - "ArguAna": 46.52, - "ArguAna-PL (pol-Latn)": 14.72, - "BSARDRetrieval (fra-Latn)": 6.98, - "BrightRetrieval (robotics)": 8.36, - "BrightRetrieval (psychology)": 22.63, - "BrightRetrieval (leetcode)": 26.4, - "BrightRetrieval 
(biology)": 15.52, - "BrightRetrieval (theoremqa_questions)": 18.49, - "BrightRetrieval (economics)": 16.64, - "BrightRetrieval (stackoverflow)": 9.48, - "BrightRetrieval (pony)": 6.95, - "BrightRetrieval (earth_science)": 20.11, - "BrightRetrieval (theoremqa_theorems)": 12.38, - "BrightRetrieval (sustainable_living)": 15.34, - "BrightRetrieval (aops)": 5.32, - "CQADupstackRetrieval": 44.96, - "ClimateFEVER": 21.97, - "CmedqaRetrieval (cmn-Hans)": 2.0, - "CovidRetrieval (cmn-Hans)": 3.7, - "DBPedia": 32.09, - "DuRetrieval (cmn-Hans)": 4.92, - "EcomRetrieval (cmn-Hans)": 3.94, - "FEVER": 50.86, - "FiQA-PL (pol-Latn)": 3.6, - "FiQA2018": 49.96, - "GerDaLIRSmall (deu-Latn)": 3.78, - "HellaSwag": 26.27, - "HotpotQA": 39.29, - "LEMBNarrativeQARetrieval": 19.34, - "LEMBNeedleRetrieval": 16.0, - "LEMBPasskeyRetrieval": 24.5, - "LEMBQMSumRetrieval": 21.54, - "LEMBSummScreenFDRetrieval": 60.43, - "LEMBWikimQARetrieval": 44.92, - "LeCaRDv2 (zho-Hans)": 18.09, - "LegalBenchConsumerContractsQA": 75.25, - "LegalBenchCorporateLobbying": 89.04, - "LegalQuAD (deu-Latn)": 10.67, - "LegalSummarization": 58.55, - "MMarcoRetrieval (cmn-Hans)": 7.13, - "MSMARCO": 39.75, - "MedicalRetrieval (cmn-Hans)": 1.71, - "MintakaRetrieval (ara-Arab)": 1.97, - "MintakaRetrieval (deu-Latn)": 17.21, - "MintakaRetrieval (spa-Latn)": 10.11, - "MintakaRetrieval (fra-Latn)": 12.93, - "MintakaRetrieval (hin-Deva)": 2.05, - "MintakaRetrieval (ita-Latn)": 5.63, - "MintakaRetrieval (jpn-Hira)": 6.72, - "MintakaRetrieval (por-Latn)": 8.05, - "NFCorpus": 33.29, - "NFCorpus-PL (pol-Latn)": 8.77, - "NQ": 50.45, - "PIQA": 29.03, - "Quail": 3.41, - "QuoraRetrieval": 87.46, - "RARbCode": 53.21, - "RARbMath": 71.85, - "RuBQRetrieval (rus-Cyrl)": 4.75, - "SCIDOCS": 23.76, - "SCIDOCS-PL (pol-Latn)": 4.02, - "SIQA": 2.38, - "SciFact": 65.57, - "SciFact-PL (pol-Latn)": 13.31, - "SpartQA": 0.22, - "SyntecRetrieval (fra-Latn)": 57.39, - "T2Retrieval (cmn-Hans)": 2.98, - "TRECCOVID": 51.33, - "TRECCOVID-PL (pol-Latn)": 12.12, - "TempReasonL1": 1.77, - "TempReasonL2Fact": 11.2, - "TempReasonL2Pure": 1.15, - "TempReasonL3Fact": 9.42, - "TempReasonL3Pure": 5.59, - "Touche2020": 19.93, - "VideoRetrieval (cmn-Hans)": 8.48, - "WinoGrande": 20.8, - "XPQARetrieval (ara-Arab_ara-Arab)": 9.42, - "XPQARetrieval (eng-Latn_ara-Arab)": 2.39, - "XPQARetrieval (ara-Arab_eng-Latn)": 8.98, - "XPQARetrieval (deu-Latn_deu-Latn)": 55.82, - "XPQARetrieval (eng-Latn_deu-Latn)": 11.74, - "XPQARetrieval (deu-Latn_eng-Latn)": 30.44, - "XPQARetrieval (spa-Latn_spa-Latn)": 40.01, - "XPQARetrieval (eng-Latn_spa-Latn)": 6.12, - "XPQARetrieval (spa-Latn_eng-Latn)": 29.44, - "XPQARetrieval (fra-Latn_fra-Latn)": 51.94, - "XPQARetrieval (eng-Latn_fra-Latn)": 11.48, - "XPQARetrieval (fra-Latn_eng-Latn)": 32.52, - "XPQARetrieval (hin-Deva_hin-Deva)": 37.48, - "XPQARetrieval (eng-Latn_hin-Deva)": 5.11, - "XPQARetrieval (hin-Deva_eng-Latn)": 7.37, - "XPQARetrieval (ita-Latn_ita-Latn)": 54.2, - "XPQARetrieval (eng-Latn_ita-Latn)": 6.08, - "XPQARetrieval (ita-Latn_eng-Latn)": 30.32, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 37.45, - "XPQARetrieval (eng-Latn_jpn-Hira)": 5.79, - "XPQARetrieval (jpn-Hira_eng-Latn)": 14.77, - "XPQARetrieval (kor-Hang_kor-Hang)": 10.4, - "XPQARetrieval (eng-Latn_kor-Hang)": 7.09, - "XPQARetrieval (kor-Hang_eng-Latn)": 6.95, - "XPQARetrieval (pol-Latn_pol-Latn)": 23.67, - "XPQARetrieval (eng-Latn_pol-Latn)": 8.83, - "XPQARetrieval (pol-Latn_eng-Latn)": 15.94, - "XPQARetrieval (por-Latn_por-Latn)": 33.56, - "XPQARetrieval (eng-Latn_por-Latn)": 3.76, - 
"XPQARetrieval (por-Latn_eng-Latn)": 23.45, - "XPQARetrieval (tam-Taml_tam-Taml)": 5.53, - "XPQARetrieval (eng-Latn_tam-Taml)": 3.3, - "XPQARetrieval (tam-Taml_eng-Latn)": 4.0, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 23.84, - "XPQARetrieval (eng-Latn_cmn-Hans)": 7.2, - "XPQARetrieval (cmn-Hans_eng-Latn)": 12.84 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "all-mpnet-base-v2", - "AFQMC (cmn-Hans)": 8.01, - "ATEC (cmn-Hans)": 14.03, - "BIOSSES": 80.43, - "BQ (cmn-Hans)": 21.39, - "CDSC-R (pol-Latn)": 77.04, - "LCQMC (cmn-Hans)": 22.84, - "PAWSX (cmn-Hans)": 6.44, - "RUParaPhraserSTS (rus-Cyrl)": 42.15, - "RuSTSBenchmarkSTS (rus-Cyrl)": 55.68, - "SICK-R": 80.59, - "SICK-R-PL (pol-Latn)": 50.2, - "SICKFr (fra-Latn)": 67.05, - "STS12": 72.63, - "STS13": 83.48, - "STS14": 78.0, - "STS15": 85.66, - "STS16": 80.03, - "STS17 (en-en)": 90.6, - "STS17 (eng-Latn_ara-Arab)": 6.76, - "STS17 (fra-Latn_eng-Latn)": 41.64, - "STS17 (eng-Latn_tur-Latn)": -4.58, - "STS17 (eng-Latn_deu-Latn)": 35.5, - "STS17 (spa-Latn_eng-Latn)": 25.28, - "STS17 (ita-Latn_eng-Latn)": 31.8, - "STS17 (spa-Latn)": 78.4, - "STS17 (kor-Hang)": 39.11, - "STS17 (ara-Arab)": 55.42, - "STS17 (nld-Latn_eng-Latn)": 32.89, - "STS22 (en)": 68.39, - "STS22 (spa-Latn_eng-Latn)": 55.09, - "STS22 (deu-Latn_pol-Latn)": 23.53, - "STS22 (cmn-Hans_eng-Latn)": 40.47, - "STS22 (pol-Latn)": 24.21, - "STS22 (tur-Latn)": 29.35, - "STS22 (spa-Latn_ita-Latn)": 41.61, - "STS22 (fra-Latn_pol-Latn)": 73.25, - "STS22 (rus-Cyrl)": 15.83, - "STS22 (deu-Latn)": 27.0, - "STS22 (spa-Latn)": 55.98, - "STS22 (pol-Latn_eng-Latn)": 51.07, - "STS22 (fra-Latn)": 77.1, - "STS22 (deu-Latn_eng-Latn)": 49.73, - "STS22 (ara-Arab)": 38.96, - "STS22 (deu-Latn_fra-Latn)": 31.39, - "STS22 (ita-Latn)": 58.02, - "STS22 (cmn-Hans)": 42.24, - "STSB (cmn-Hans)": 37.7, - "STSBenchmark": 83.42, - "STSBenchmarkMultilingualSTS (nld-Latn)": 57.01, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.54, - "STSBenchmarkMultilingualSTS (fra-Latn)": 65.15, - "STSBenchmarkMultilingualSTS (ita-Latn)": 62.72, - "STSBenchmarkMultilingualSTS (spa-Latn)": 65.78, - "STSBenchmarkMultilingualSTS (en)": 83.42, - "STSBenchmarkMultilingualSTS (deu-Latn)": 61.43, - "STSBenchmarkMultilingualSTS (por-Latn)": 62.12, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.43, - "STSBenchmarkMultilingualSTS (pol-Latn)": 52.36 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "all-mpnet-base-v2", - "SummEval": 27.49, - "SummEvalFr (fra-Latn)": 28.11 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "all-mpnet-base-v2" - } - ] - } - }, - "glove.6B.300d": { - "BitextMining": { - "f1": [ - { - "Model": "glove.6B.300d", - "BUCC (de-en)": 0.18, - "BUCC (fr-en)": 0.19, - "BUCC (ru-en)": 0.1, - "BUCC (zh-en)": 0.0 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "glove.6B.300d", - "AmazonCounterfactualClassification (en)": 56.91, - "AmazonPolarityClassification": 60.32, - "AmazonReviewsClassification (en)": 29.67, - "Banking77Classification": 67.69, - "EmotionClassification": 36.93, - "ImdbClassification": 62.57, - "MTOPDomainClassification (en)": 79.11, - "MTOPIntentClassification (en)": 55.85, - "MassiveIntentClassification (en)": 56.19, - "MassiveScenarioClassification (en)": 66.03, - "ToxicConversationsClassification": 65.4, - "TweetSentimentExtractionClassification": 50.8 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "glove.6B.300d", - "ArxivClusteringP2P": 32.56, - "ArxivClusteringS2S": 23.14, - "BiorxivClusteringP2P": 29.27, - 
"BiorxivClusteringS2S": 19.18, - "MedrxivClusteringP2P": 26.12, - "MedrxivClusteringS2S": 20.38, - "RedditClustering": 28.46, - "RedditClusteringP2P": 35.82, - "StackExchangeClustering": 35.8, - "StackExchangeClusteringP2P": 28.51, - "TwentyNewsgroupsClustering": 25.83 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "glove.6B.300d", - "SprintDuplicateQuestions": 86.96, - "TwitterSemEval2015": 48.45, - "TwitterURLCorpus": 77.35 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "glove.6B.300d", - "AskUbuntuDupQuestions": 49.57, - "MindSmallReranking": 27.01, - "SciDocsRR": 62.56, - "StackOverflowDupQuestions": 34.03 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "glove.6B.300d", - "ArguAna": 36.3, - "CQADupstackRetrieval": 15.47, - "ClimateFEVER": 14.44, - "DBPedia": 18.28, - "FEVER": 14.99, - "FiQA2018": 10.09, - "HotpotQA": 19.18, - "MSMARCO": 9.6, - "NFCorpus": 13.87, - "NQ": 12.87, - "QuoraRetrieval": 71.32, - "SCIDOCS": 8.04, - "SciFact": 29.58, - "TRECCOVID": 36.22, - "Touche2020": 13.99 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "glove.6B.300d", - "BIOSSES": 44.93, - "SICK-R": 55.43, - "STS12": 54.64, - "STS13": 69.16, - "STS14": 60.81, - "STS15": 72.31, - "STS16": 65.34, - "STS17 (en-en)": 77.95, - "STS22 (en)": 56.35, - "STSBenchmark": 61.54 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "glove.6B.300d", - "SummEval": 28.87 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "glove.6B.300d" - } - ] - } - }, - "google-gecko.text-embedding-preview-0409": { - "BitextMining": { - "f1": [ - { - "Model": "google-gecko.text-embedding-preview-0409" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "AmazonCounterfactualClassification (en)": 75.34, - "AmazonPolarityClassification": 97.34, - "AmazonReviewsClassification (en)": 51.17, - "Banking77Classification": 88.62, - "EmotionClassification": 52.51, - "ImdbClassification": 95.65, - "MTOPDomainClassification (en)": 98.35, - "MTOPIntentClassification (en)": 83.43, - "MassiveIntentClassification (en)": 80.22, - "MassiveScenarioClassification (en)": 87.19, - "ToxicConversationsClassification": 89.67, - "TweetSentimentExtractionClassification": 74.52 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "ArxivClusteringP2P": 46.27, - "ArxivClusteringS2S": 38.36, - "BiorxivClusteringP2P": 37.87, - "BiorxivClusteringS2S": 35.67, - "MedrxivClusteringP2P": 33.11, - "MedrxivClusteringS2S": 31.54, - "RedditClustering": 65.81, - "RedditClusteringP2P": 66.62, - "StackExchangeClustering": 74.52, - "StackExchangeClusteringP2P": 37.63, - "TwentyNewsgroupsClustering": 54.87 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "SprintDuplicateQuestions": 96.26, - "TwitterSemEval2015": 79.04, - "TwitterURLCorpus": 87.53 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "AskUbuntuDupQuestions": 64.4, - "MindSmallReranking": 33.07, - "SciDocsRR": 83.59, - "StackOverflowDupQuestions": 54.56 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "ArguAna": 62.18, - "BrightRetrieval (earth_science)": 34.38, - "BrightRetrieval (leetcode)": 29.64, - "BrightRetrieval (theoremqa_questions)": 21.51, - "BrightRetrieval (aops)": 9.33, - "BrightRetrieval (sustainable_living)": 17.25, - "BrightRetrieval (pony)": 
3.59, - "BrightRetrieval (theoremqa_theorems)": 16.77, - "BrightRetrieval (stackoverflow)": 17.93, - "BrightRetrieval (biology)": 22.98, - "BrightRetrieval (robotics)": 15.98, - "BrightRetrieval (economics)": 19.5, - "BrightRetrieval (psychology)": 27.86, - "CQADupstackRetrieval": 48.89, - "ClimateFEVER": 33.21, - "DBPedia": 47.12, - "FEVER": 86.96, - "FiQA2018": 59.24, - "HotpotQA": 71.33, - "MSMARCO": 32.58, - "NFCorpus": 40.33, - "NQ": 61.28, - "QuoraRetrieval": 88.18, - "SCIDOCS": 20.34, - "SciFact": 75.42, - "TRECCOVID": 82.62, - "Touche2020": 25.86 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "BIOSSES": 89.46, - "SICK-R": 81.93, - "STS12": 77.59, - "STS13": 90.36, - "STS14": 85.25, - "STS15": 89.66, - "STS16": 87.34, - "STS17 (en-en)": 92.06, - "STS22 (en)": 68.02, - "STSBenchmark": 88.99 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "SummEval": 32.63 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "google-gecko.text-embedding-preview-0409", - "Core17InstructionRetrieval": 5.44, - "News21InstructionRetrieval": 3.94, - "Robust04InstructionRetrieval": -2.4 - } - ] - } - }, - "flaubert_base_cased": { - "BitextMining": { - "f1": [ - { - "Model": "flaubert_base_cased" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "flaubert_base_cased", - "AmazonReviewsClassification (fr)": 24.9, - "MTOPDomainClassification (fr)": 25.55, - "MTOPIntentClassification (fr)": 9.49, - "MasakhaNEWSClassification (fra)": 71.14, - "MassiveIntentClassification (fr)": 6.98, - "MassiveScenarioClassification (fr)": 11.41 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "flaubert_base_cased", - "AlloProfClusteringP2P": 52.86, - "AlloProfClusteringS2S": 14.46, - "HALClusteringS2S": 3.85, - "MLSUMClusteringP2P": 39.06, - "MLSUMClusteringS2S": 17.13, - "MasakhaNEWSClusteringP2P (fra)": 41.61, - "MasakhaNEWSClusteringS2S (fra)": 21.26 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "flaubert_base_cased", - "OpusparcusPC (fr)": 82.15, - "PawsXPairClassification (fr)": 51.89 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "flaubert_base_cased", - "AlloprofReranking": 34.81, - "SyntecReranking": 55.88 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "flaubert_base_cased", - "AlloprofRetrieval": 1.63, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 0.58, - "SyntecRetrieval": 20.56, - "XPQARetrieval (fr)": 6.59 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "flaubert_base_cased", - "SICKFr": 53.86, - "STS22 (fr)": 65.37, - "STSBenchmarkMultilingualSTS (fr)": 37.14 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "flaubert_base_cased", - "SummEvalFr": 31.26 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "flaubert_base_cased" - } - ] - } - }, - "LLM2Vec-Mistral-supervised": { - "BitextMining": { - "f1": [ - { - "Model": "LLM2Vec-Mistral-supervised" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "AmazonCounterfactualClassification (en)": 77.58, - "AmazonPolarityClassification": 91.12, - "AmazonReviewsClassification (en)": 49.97, - "Banking77Classification": 88.31, - "EmotionClassification": 52.04, - "ImdbClassification": 87.42, - "MTOPDomainClassification (en)": 96.04, - "MTOPIntentClassification (en)": 84.77, - "MassiveIntentClassification (en)": 79.29, - "MassiveScenarioClassification (en)": 81.64, - 
"ToxicConversationsClassification": 69.26, - "TweetSentimentExtractionClassification": 62.14 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "ArxivClusteringP2P": 42.81, - "ArxivClusteringS2S": 44.24, - "BiorxivClusteringP2P": 34.27, - "BiorxivClusteringS2S": 35.53, - "MedrxivClusteringP2P": 31.07, - "MedrxivClusteringS2S": 31.27, - "RedditClustering": 60.24, - "RedditClusteringP2P": 64.12, - "StackExchangeClustering": 70.73, - "StackExchangeClusteringP2P": 34.5, - "TwentyNewsgroupsClustering": 52.18 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "SprintDuplicateQuestions": 96.82, - "TwitterSemEval2015": 80.6, - "TwitterURLCorpus": 86.56 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "AskUbuntuDupQuestions": 63.98, - "MindSmallReranking": 31.5, - "SciDocsRR": 83.8, - "StackOverflowDupQuestions": 54.41 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "ArguAna": 57.48, - "CQADupstackRetrieval": 48.84, - "ClimateFEVER": 35.19, - "DBPedia": 49.58, - "FEVER": 89.4, - "FiQA2018": 53.11, - "HotpotQA": 74.07, - "MSMARCO": 42.17, - "NFCorpus": 39.33, - "NQ": 61.7, - "QuoraRetrieval": 87.75, - "SCIDOCS": 22.5, - "SciFact": 78.86, - "TRECCOVID": 77.69, - "Touche2020": 22.18 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "BIOSSES": 85.24, - "SICK-R": 83.7, - "STS12": 78.8, - "STS13": 86.37, - "STS14": 84.04, - "STS15": 88.99, - "STS16": 87.22, - "STS17 (en-en)": 90.19, - "STS22 (en)": 67.68, - "STSBenchmark": 88.65 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "LLM2Vec-Mistral-supervised", - "SummEval": 29.96 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LLM2Vec-Mistral-supervised" - } - ] - } - }, - "bge-large-en-v1.5": { - "BitextMining": { - "f1": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-large-en-v1.5", - "AILACasedocs": 25.15, - "AILAStatutes": 20.74, - "ARCChallenge": 9.99, - "AlphaNLI": 13.13, - "BrightRetrieval (stackoverflow)": 9.51, - "BrightRetrieval (earth_science)": 24.15, - "BrightRetrieval (aops)": 6.08, - "BrightRetrieval (sustainable_living)": 13.27, - "BrightRetrieval (psychology)": 17.44, - "BrightRetrieval (robotics)": 12.21, - "BrightRetrieval (theoremqa_theorems)": 5.51, - "BrightRetrieval (pony)": 5.64, - "BrightRetrieval (biology)": 11.96, - "BrightRetrieval (theoremqa_questions)": 12.56, - "BrightRetrieval (leetcode)": 26.68, - "BrightRetrieval (economics)": 16.59, - "GerDaLIRSmall": 3.96, - "HellaSwag": 28.5, - "LeCaRDv2": 22.68, - "LegalBenchConsumerContractsQA": 73.52, - "LegalBenchCorporateLobbying": 91.51, - "LegalQuAD": 16.22, - "LegalSummarization": 59.99, - "PIQA": 27.99, - "Quail": 1.83, - "RARbCode": 48.12, - "RARbMath": 57.36, - "SIQA": 1.04, - "SpartQA": 2.99, - "TempReasonL1": 1.46, - "TempReasonL2Fact": 24.25, - "TempReasonL2Pure": 2.35, - "TempReasonL3Fact": 20.64, - "TempReasonL3Pure": 6.67, - "WinoGrande": 19.18 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - 
"Summarization": { - "spearman": [ - { - "Model": "bge-large-en-v1.5" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-large-en-v1.5" - } - ] - } - }, - "LLM2Vec-Llama-2-unsupervised": { - "BitextMining": { - "f1": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "AmazonCounterfactualClassification (en)": 76.91, - "AmazonPolarityClassification": 79.05, - "AmazonReviewsClassification (en)": 40.08, - "Banking77Classification": 84.65, - "EmotionClassification": 46.58, - "ImdbClassification": 75.68, - "MTOPDomainClassification (en)": 94.33, - "MTOPIntentClassification (en)": 79.54, - "MassiveIntentClassification (en)": 73.84, - "MassiveScenarioClassification (en)": 79.17, - "ToxicConversationsClassification": 71.81, - "TweetSentimentExtractionClassification": 57.17 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "ArxivClusteringP2P": 47.81, - "ArxivClusteringS2S": 40.53, - "BiorxivClusteringP2P": 38.12, - "BiorxivClusteringS2S": 31.25, - "MedrxivClusteringP2P": 30.94, - "MedrxivClusteringS2S": 28.04, - "RedditClustering": 42.84, - "RedditClusteringP2P": 60.1, - "StackExchangeClustering": 65.12, - "StackExchangeClusteringP2P": 33.61, - "TwentyNewsgroupsClustering": 30.76 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "SprintDuplicateQuestions": 87.57, - "TwitterSemEval2015": 65.14, - "TwitterURLCorpus": 80.94 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "AskUbuntuDupQuestions": 55.56, - "MindSmallReranking": 30.86, - "SciDocsRR": 77.62, - "StackOverflowDupQuestions": 47.77 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "ArguAna": 47.09, - "CQADupstackRetrieval": 30.78, - "ClimateFEVER": 20.67, - "DBPedia": 25.81, - "FEVER": 43.48, - "FiQA2018": 24.62, - "HotpotQA": 48.46, - "MSMARCO": 18.81, - "NFCorpus": 26.81, - "NQ": 33.21, - "QuoraRetrieval": 86.15, - "SCIDOCS": 10.0, - "SciFact": 64.48, - "TRECCOVID": 60.67, - "Touche2020": 10.18 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "BIOSSES": 82.41, - "SICK-R": 71.77, - "STS12": 65.39, - "STS13": 79.26, - "STS14": 72.98, - "STS15": 82.72, - "STS16": 81.02, - "STS17 (en-en)": 86.7, - "STS22 (en)": 63.47, - "STSBenchmark": 78.32 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised", - "SummEval": 31.38 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LLM2Vec-Llama-2-unsupervised" - } - ] - } - }, - "GritLM-7B-noinstruct": { - "BitextMining": { - "f1": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "GritLM-7B-noinstruct", - "ARCChallenge": 16.57, - "AlphaNLI": 29.56, - "HellaSwag": 36.03, - "PIQA": 35.8, - "Quail": 8.68, - "RARbCode": 83.14, - "RARbMath": 83.01, - "SIQA": 5.73, - "SpartQA": 1.56, - "TempReasonL1": 2.57, - "TempReasonL2Fact": 48.25, - "TempReasonL2Pure": 8.98, - "TempReasonL3Fact": 34.11, - "TempReasonL3Pure": 12.44, - 
"WinoGrande": 52.12 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "GritLM-7B-noinstruct" - } - ] - } - }, - "voyage-law-2": { - "BitextMining": { - "f1": [ - { - "Model": "voyage-law-2" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "voyage-law-2", - "AmazonReviewsClassification (fr)": 41.98, - "MTOPDomainClassification (fr)": 90.12, - "MTOPIntentClassification (fr)": 62.44, - "MasakhaNEWSClassification (fra)": 76.42, - "MassiveIntentClassification (fr)": 66.94, - "MassiveScenarioClassification (fr)": 72.78 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "voyage-law-2", - "AlloProfClusteringP2P": 62.5, - "AlloProfClusteringS2S": 44.28, - "HALClusteringS2S": 26.36, - "MLSUMClusteringP2P (fr)": 44.03, - "MLSUMClusteringS2S (fr)": 42.95, - "MasakhaNEWSClusteringP2P (fra)": 50.68, - "MasakhaNEWSClusteringS2S (fra)": 38.79 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "voyage-law-2", - "OpusparcusPC (fr)": 93.06, - "PawsXPairClassification (fr)": 61.54 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "voyage-law-2", - "AlloprofReranking": 72.92, - "SyntecReranking": 91.2 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "voyage-law-2", - "AILACasedocs": 44.56, - "AILAStatutes": 45.51, - "AlloprofRetrieval": 57.28, - "BSARDRetrieval": 11.83, - "GerDaLIRSmall": 44.91, - "LEMBNarrativeQARetrieval": 55.78, - "LEMBNeedleRetrieval": 80.5, - "LEMBPasskeyRetrieval": 93.75, - "LEMBQMSumRetrieval": 57.26, - "LEMBSummScreenFDRetrieval": 98.72, - "LEMBWikimQARetrieval": 87.08, - "LeCaRDv2": 72.75, - "LegalBenchConsumerContractsQA": 83.27, - "LegalBenchCorporateLobbying": 95.66, - "LegalQuAD": 67.47, - "LegalSummarization": 68.96, - "MintakaRetrieval (fr)": 34.92, - "SyntecRetrieval": 87.33, - "XPQARetrieval (fr)": 73.56 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "voyage-law-2", - "SICKFr": 74.09, - "STS22 (fr)": 83.75, - "STSBenchmarkMultilingualSTS (fr)": 83.02 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "voyage-law-2", - "SummEvalFr": 30.34 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "voyage-law-2" - } - ] - } - }, - "Cohere-embed-english-v3.0": { - "BitextMining": { - "f1": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "Cohere-embed-english-v3.0", - "AILACasedocs": 31.54, - "AILAStatutes": 27.15, - "ARCChallenge": 9.89, - "AlphaNLI": 15.1, - "BrightRetrieval (psychology)": 21.82, - "BrightRetrieval (economics)": 20.18, - "BrightRetrieval (robotics)": 16.21, - "BrightRetrieval (biology)": 18.98, - "BrightRetrieval (stackoverflow)": 16.47, - "BrightRetrieval (theoremqa_theorems)": 6.04, - "BrightRetrieval (pony)": 1.77, - "BrightRetrieval (sustainable_living)": 17.69, - "BrightRetrieval (aops)": 6.46, - "BrightRetrieval (theoremqa_questions)": 15.07, - "BrightRetrieval (leetcode)": 26.78, - "BrightRetrieval (earth_science)": 27.45, - "GerDaLIRSmall": 6.05, - "HellaSwag": 26.35, 
- "LeCaRDv2": 21.02, - "LegalBenchConsumerContractsQA": 77.12, - "LegalBenchCorporateLobbying": 93.68, - "LegalQuAD": 26.08, - "LegalSummarization": 61.7, - "PIQA": 28.49, - "Quail": 4.1, - "RARbCode": 57.19, - "RARbMath": 72.26, - "SIQA": 4.26, - "SpartQA": 3.75, - "TempReasonL1": 1.5, - "TempReasonL2Fact": 35.91, - "TempReasonL2Pure": 1.89, - "TempReasonL3Fact": 27.51, - "TempReasonL3Pure": 8.53, - "WinoGrande": 58.01 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "Cohere-embed-english-v3.0" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "Cohere-embed-english-v3.0", - "Core17InstructionRetrieval": 2.8, - "News21InstructionRetrieval": 0.2, - "Robust04InstructionRetrieval": -3.63 - } - ] - } - }, - "Baichuan-text-embedding": { - "BitextMining": { - "f1": [ - { - "Model": "Baichuan-text-embedding" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "Baichuan-text-embedding", - "AmazonReviewsClassification (zh)": 48.3, - "IFlyTek": 50.75, - "JDReview": 87.69, - "MassiveIntentClassification (zh-CN)": 74.91, - "MassiveScenarioClassification (zh-CN)": 81.28, - "MultilingualSentiment": 76.83, - "OnlineShopping": 94.42, - "TNews": 52.62, - "Waimai": 88.77 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "Baichuan-text-embedding", - "CLSClusteringP2P": 60.37, - "CLSClusteringS2S": 51.09, - "ThuNewsClusteringP2P": 58.23, - "ThuNewsClusteringS2S": 57.83 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "Baichuan-text-embedding", - "Cmnli": 85.31, - "Ocnli": 79.33 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "Baichuan-text-embedding", - "CMedQAv1": 88.06, - "CMedQAv2": 88.46, - "MMarcoReranking": 34.3, - "T2Reranking": 67.85 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "Baichuan-text-embedding", - "CmedqaRetrieval": 47.64, - "CovidRetrieval": 86.86, - "DuRetrieval": 88.43, - "EcomRetrieval": 66.39, - "MMarcoRetrieval": 80.17, - "MedicalRetrieval": 61.1, - "T2Retrieval": 80.11, - "VideoRetrieval": 74.28 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "Baichuan-text-embedding", - "AFQMC": 50.8, - "ATEC": 53.23, - "BQ": 66.49, - "LCQMC": 76.6, - "PAWSX": 47.56, - "QBQTC": 39.96, - "STS22 (zh)": 65.78, - "STSB": 80.14 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "Baichuan-text-embedding" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "Baichuan-text-embedding" - } - ] - } - }, - "flaubert_large_cased": { - "BitextMining": { - "f1": [ - { - "Model": "flaubert_large_cased" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "flaubert_large_cased", - "AmazonReviewsClassification (fr)": 22.45, - "MTOPDomainClassification (fr)": 24.27, - "MTOPIntentClassification (fr)": 9.79, - "MasakhaNEWSClassification (fra)": 55.64, - "MassiveIntentClassification (fr)": 16.41, - "MassiveScenarioClassification (fr)": 22.72 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "flaubert_large_cased", - "AlloProfClusteringP2P": 40.85, - "AlloProfClusteringS2S": 21.76, - "HALClusteringS2S": 5.26, - "MLSUMClusteringP2P": 38.09, - "MLSUMClusteringS2S": 18.71, - "MasakhaNEWSClusteringP2P (fra)": 26.43, - "MasakhaNEWSClusteringS2S (fra)": 24.68 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "flaubert_large_cased", - "OpusparcusPC (fr)": 74.78, - "PawsXPairClassification (fr)": 54.14 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": 
"flaubert_large_cased", - "AlloprofReranking": 26.29, - "SyntecReranking": 42.8 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "flaubert_large_cased", - "AlloprofRetrieval": 0.58, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 0.26, - "SyntecRetrieval": 1.58, - "XPQARetrieval (fr)": 3.69 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "flaubert_large_cased", - "SICKFr": 34.6, - "STS22 (fr)": 48.52, - "STSBenchmarkMultilingualSTS (fr)": 15.66 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "flaubert_large_cased", - "SummEvalFr": 29.25 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "flaubert_large_cased" - } - ] - } - }, - "dragon-plus": { - "BitextMining": { - "f1": [ - { - "Model": "dragon-plus" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "dragon-plus" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "dragon-plus" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "dragon-plus" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "dragon-plus" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "dragon-plus", - "ARCChallenge": 8.91, - "AlphaNLI": 32.1, - "HellaSwag": 27.69, - "PIQA": 28.01, - "Quail": 4.09, - "RARbCode": 17.58, - "RARbMath": 45.09, - "SIQA": 2.0, - "SpartQA": 10.34, - "TempReasonL1": 1.82, - "TempReasonL2Fact": 17.45, - "TempReasonL2Pure": 0.55, - "TempReasonL3Fact": 15.71, - "TempReasonL3Pure": 7.97, - "WinoGrande": 67.18 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "dragon-plus" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "dragon-plus" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "dragon-plus" - } - ] - } - }, - "text2vec-base-multilingual": { - "BitextMining": { - "f1": [ - { - "Model": "text2vec-base-multilingual" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text2vec-base-multilingual", - "AmazonReviewsClassification (fr)": 34.25, - "MTOPDomainClassification (fr)": 71.83, - "MTOPIntentClassification (fr)": 44.53, - "MasakhaNEWSClassification (fra)": 73.84, - "MassiveIntentClassification (fr)": 51.93, - "MassiveScenarioClassification (fr)": 58.31 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text2vec-base-multilingual", - "AlloProfClusteringP2P": 49.11, - "AlloProfClusteringS2S": 32.72, - "HALClusteringS2S": 16.19, - "MLSUMClusteringP2P": 36.19, - "MLSUMClusteringS2S": 30.39, - "MasakhaNEWSClusteringP2P (fra)": 38.51, - "MasakhaNEWSClusteringS2S (fra)": 32.51 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text2vec-base-multilingual", - "OpusparcusPC (fr)": 92.04, - "PawsXPairClassification (fr)": 65.57 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text2vec-base-multilingual", - "AlloprofReranking": 51.48, - "SyntecReranking": 70.28 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text2vec-base-multilingual", - "AlloprofRetrieval": 18.9, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 14.81, - "SyntecRetrieval": 49.69, - "XPQARetrieval (fr)": 40.4 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text2vec-base-multilingual", - "SICKFr": 77.25, - "STS22 (fr)": 74.1, - "STSBenchmarkMultilingualSTS (fr)": 83.48 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text2vec-base-multilingual", - "SummEvalFr": 29.33 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text2vec-base-multilingual" - } - ] - } - }, - "mistral-embed": { - "BitextMining": { - "f1": [ - { - 
"Model": "mistral-embed" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "mistral-embed", - "AmazonReviewsClassification (fr)": 41.59, - "MTOPDomainClassification (fr)": 90.05, - "MTOPIntentClassification (fr)": 66.09, - "MasakhaNEWSClassification (fra)": 81.4, - "MassiveIntentClassification (fr)": 62.83, - "MassiveScenarioClassification (fr)": 69.71 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "mistral-embed", - "AlloProfClusteringP2P": 62.01, - "AlloProfClusteringS2S": 49.2, - "HALClusteringS2S": 26.17, - "MLSUMClusteringP2P": 45.28, - "MLSUMClusteringS2S": 42.74, - "MasakhaNEWSClusteringP2P (fra)": 48.13, - "MasakhaNEWSClusteringS2S (fra)": 39.62 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "mistral-embed", - "OpusparcusPC (fr)": 92.61, - "PawsXPairClassification (fr)": 62.02 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "mistral-embed", - "AlloprofReranking": 72.36, - "SyntecReranking": 88.57 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "mistral-embed", - "AILACasedocs": 38.2, - "AILAStatutes": 44.81, - "AlloprofRetrieval": 56.84, - "BSARDRetrieval": 2.48, - "GerDaLIRSmall": 17.85, - "LeCaRDv2": 61.12, - "LegalBenchConsumerContractsQA": 80.8, - "LegalBenchCorporateLobbying": 94.11, - "LegalQuAD": 47.17, - "LegalSummarization": 67.39, - "MintakaRetrieval (fr)": 21.73, - "SyntecRetrieval": 78.77, - "XPQARetrieval (fr)": 74.24 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "mistral-embed", - "SICKFr": 76.21, - "STS22 (fr)": 82.74, - "STSBenchmarkMultilingualSTS (fr)": 79.72 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "mistral-embed", - "SummEvalFr": 31.47 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "mistral-embed" - } - ] - } - }, - "text-similarity-davinci-001": { - "BitextMining": { - "f1": [ - { - "Model": "text-similarity-davinci-001" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-similarity-davinci-001" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-similarity-davinci-001", - "RedditClustering": 31.78, - "StackExchangeClustering": 36.86, - "TwentyNewsgroupsClustering": 29.33 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-similarity-davinci-001", - "SprintDuplicateQuestions": 69.52, - "TwitterSemEval2015": 74.42, - "TwitterURLCorpus": 83.75 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-similarity-davinci-001", - "AskUbuntuDupQuestions": 53.56, - "SciDocsRR": 68.7, - "StackOverflowDupQuestions": 39.41 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-similarity-davinci-001" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-similarity-davinci-001", - "BIOSSES": 68.95, - "SICK-R": 78.72, - "STSBenchmark": 84.08 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-similarity-davinci-001" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-similarity-davinci-001" - } - ] - } - }, - "monot5-base-msmarco-10k": { - "BitextMining": { - "f1": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": 
"monot5-base-msmarco-10k" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "monot5-base-msmarco-10k" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "monot5-base-msmarco-10k", - "Core17InstructionRetrieval": -4.06, - "News21InstructionRetrieval": 5.02, - "Robust04InstructionRetrieval": -6.2 - } - ] - } - }, - "nomic-embed-text-v1.5-512": { - "BitextMining": { - "f1": [ - { - "Model": "nomic-embed-text-v1.5-512" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "nomic-embed-text-v1.5-512", - "AmazonCounterfactualClassification (en)": 74.27, - "AmazonPolarityClassification": 91.89, - "AmazonReviewsClassification (en)": 46.97, - "Banking77Classification": 84.15, - "EmotionClassification": 47.73, - "ImdbClassification": 85.47, - "MTOPDomainClassification (en)": 92.62, - "MTOPIntentClassification (en)": 74.27, - "MassiveIntentClassification (en)": 73.07, - "MassiveScenarioClassification (en)": 76.82, - "ToxicConversationsClassification": 71.25, - "TweetSentimentExtractionClassification": 60.4 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "nomic-embed-text-v1.5-512", - "ArxivClusteringP2P": 45.45, - "ArxivClusteringS2S": 36.19, - "BiorxivClusteringP2P": 38.41, - "BiorxivClusteringS2S": 32.28, - "MedrxivClusteringP2P": 34.47, - "MedrxivClusteringS2S": 31.43, - "RedditClustering": 55.9, - "RedditClusteringP2P": 60.58, - "StackExchangeClustering": 62.94, - "StackExchangeClusteringP2P": 33.81, - "TwentyNewsgroupsClustering": 49.36 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "nomic-embed-text-v1.5-512", - "SprintDuplicateQuestions": 92.91, - "TwitterSemEval2015": 74.3, - "TwitterURLCorpus": 86.57 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "nomic-embed-text-v1.5-512", - "AskUbuntuDupQuestions": 61.6, - "MindSmallReranking": 30.34, - "SciDocsRR": 80.33, - "StackOverflowDupQuestions": 50.32 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "nomic-embed-text-v1.5-512", - "ArguAna": 47.45, - "CQADupstackRetrieval": 39.06, - "ClimateFEVER": 40.7, - "DBPedia": 42.96, - "FEVER": 85.7, - "FiQA2018": 36.92, - "HotpotQA": 71.48, - "MSMARCO": 42.29, - "NFCorpus": 33.31, - "NQ": 58.83, - "QuoraRetrieval": 87.87, - "SCIDOCS": 17.88, - "SciFact": 70.12, - "TRECCOVID": 82.12, - "Touche2020": 29.24 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "nomic-embed-text-v1.5-512", - "BIOSSES": 83.3, - "SICK-R": 79.27, - "STS12": 78.3, - "STS13": 85.81, - "STS14": 81.38, - "STS15": 86.79, - "STS16": 84.56, - "STS17 (en-en)": 87.25, - "STS22 (en)": 65.24, - "STSBenchmark": 85.14 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "nomic-embed-text-v1.5-512", - "SummEval": 30.47 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "nomic-embed-text-v1.5-512" - } - ] - } - }, - "bge-base-zh-v1.5": { - "BitextMining": { - "f1": [ - { - "Model": "bge-base-zh-v1.5" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-base-zh-v1.5", - "AmazonReviewsClassification (zh)": 40.15, - "IFlyTek": 48.62, - "JDReview": 83.62, - "MassiveIntentClassification (zh-CN)": 67.93, - "MassiveScenarioClassification (zh-CN)": 73.98, - "MultilingualSentiment": 70.67, - "OnlineShopping": 91.26, - "TNews": 51.08, - "Waimai": 85.36 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-base-zh-v1.5", - "CLSClusteringP2P": 39.91, - "CLSClusteringS2S": 37.63, - "ThuNewsClusteringP2P": 58.45, - 
"ThuNewsClusteringS2S": 54.12 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-base-zh-v1.5", - "Cmnli": 84.1, - "Ocnli": 75.41 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-base-zh-v1.5", - "CMedQAv1": 80.47, - "CMedQAv2": 84.88, - "MMarcoReranking": 29.74, - "T2Reranking": 66.49 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-base-zh-v1.5", - "CmedqaRetrieval": 41.61, - "CovidRetrieval": 74.7, - "DuRetrieval": 85.07, - "EcomRetrieval": 64.25, - "MMarcoRetrieval": 77.69, - "MedicalRetrieval": 56.51, - "T2Retrieval": 83.71, - "VideoRetrieval": 72.35 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-base-zh-v1.5", - "AFQMC": 42.4, - "ATEC": 48.17, - "BQ": 61.78, - "LCQMC": 74.45, - "PAWSX": 20.4, - "QBQTC": 36.22, - "STS22 (zh)": 68.01, - "STSB": 78.31 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bge-base-zh-v1.5" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-base-zh-v1.5" - } - ] - } - }, - "komninos": { - "BitextMining": { - "f1": [ - { - "Model": "komninos", - "BUCC (de-en)": 0.18, - "BUCC (fr-en)": 0.08, - "BUCC (ru-en)": 0.15, - "BUCC (zh-en)": 0.05, - "Tatoeba (afr-eng)": 4.82, - "Tatoeba (amh-eng)": 1.18, - "Tatoeba (ang-eng)": 8.54, - "Tatoeba (ara-eng)": 0.63, - "Tatoeba (arq-eng)": 0.4, - "Tatoeba (arz-eng)": 0.63, - "Tatoeba (ast-eng)": 11.69, - "Tatoeba (awa-eng)": 0.0, - "Tatoeba (aze-eng)": 3.22, - "Tatoeba (bel-eng)": 1.75, - "Tatoeba (ben-eng)": 0.2, - "Tatoeba (ber-eng)": 7.0, - "Tatoeba (bos-eng)": 9.31, - "Tatoeba (bre-eng)": 4.17, - "Tatoeba (bul-eng)": 1.29, - "Tatoeba (cat-eng)": 7.73, - "Tatoeba (cbk-eng)": 5.61, - "Tatoeba (ceb-eng)": 4.88, - "Tatoeba (ces-eng)": 3.55, - "Tatoeba (cha-eng)": 19.29, - "Tatoeba (cmn-eng)": 0.5, - "Tatoeba (cor-eng)": 4.15, - "Tatoeba (csb-eng)": 5.69, - "Tatoeba (cym-eng)": 8.4, - "Tatoeba (dan-eng)": 6.99, - "Tatoeba (deu-eng)": 3.67, - "Tatoeba (dsb-eng)": 5.33, - "Tatoeba (dtp-eng)": 4.25, - "Tatoeba (ell-eng)": 0.63, - "Tatoeba (epo-eng)": 2.45, - "Tatoeba (est-eng)": 2.69, - "Tatoeba (eus-eng)": 4.69, - "Tatoeba (fao-eng)": 7.61, - "Tatoeba (fin-eng)": 3.36, - "Tatoeba (fra-eng)": 7.0, - "Tatoeba (fry-eng)": 12.36, - "Tatoeba (gla-eng)": 3.07, - "Tatoeba (gle-eng)": 4.81, - "Tatoeba (glg-eng)": 8.12, - "Tatoeba (gsw-eng)": 18.87, - "Tatoeba (heb-eng)": 0.68, - "Tatoeba (hin-eng)": 0.1, - "Tatoeba (hrv-eng)": 5.41, - "Tatoeba (hsb-eng)": 6.32, - "Tatoeba (hun-eng)": 3.42, - "Tatoeba (hye-eng)": 0.97, - "Tatoeba (ido-eng)": 7.1, - "Tatoeba (ile-eng)": 13.61, - "Tatoeba (ina-eng)": 8.57, - "Tatoeba (ind-eng)": 7.26, - "Tatoeba (isl-eng)": 4.09, - "Tatoeba (ita-eng)": 5.54, - "Tatoeba (jav-eng)": 11.43, - "Tatoeba (jpn-eng)": 0.2, - "Tatoeba (kab-eng)": 2.71, - "Tatoeba (kat-eng)": 1.11, - "Tatoeba (kaz-eng)": 1.17, - "Tatoeba (khm-eng)": 0.55, - "Tatoeba (kor-eng)": 0.5, - "Tatoeba (kur-eng)": 8.55, - "Tatoeba (kzj-eng)": 4.61, - "Tatoeba (lat-eng)": 4.07, - "Tatoeba (lfn-eng)": 2.83, - "Tatoeba (lit-eng)": 0.95, - "Tatoeba (lvs-eng)": 3.25, - "Tatoeba (mal-eng)": 0.29, - "Tatoeba (mar-eng)": 0.2, - "Tatoeba (max-eng)": 14.53, - "Tatoeba (mhr-eng)": 0.2, - "Tatoeba (mkd-eng)": 0.2, - "Tatoeba (mon-eng)": 1.1, - "Tatoeba (nds-eng)": 10.37, - "Tatoeba (nld-eng)": 9.5, - "Tatoeba (nno-eng)": 4.49, - "Tatoeba (nob-eng)": 4.95, - "Tatoeba (nov-eng)": 14.53, - "Tatoeba (oci-eng)": 5.8, - "Tatoeba (orv-eng)": 0.24, - "Tatoeba (pam-eng)": 6.65, - "Tatoeba (pes-eng)": 0.5, - "Tatoeba (pms-eng)": 8.05, - "Tatoeba (pol-eng)": 5.13, 
- "Tatoeba (por-eng)": 5.87, - "Tatoeba (ron-eng)": 6.76, - "Tatoeba (rus-eng)": 0.2, - "Tatoeba (slk-eng)": 4.23, - "Tatoeba (slv-eng)": 6.05, - "Tatoeba (spa-eng)": 5.03, - "Tatoeba (sqi-eng)": 4.36, - "Tatoeba (srp-eng)": 1.77, - "Tatoeba (swe-eng)": 6.72, - "Tatoeba (swg-eng)": 8.54, - "Tatoeba (swh-eng)": 11.49, - "Tatoeba (tam-eng)": 1.3, - "Tatoeba (tat-eng)": 0.77, - "Tatoeba (tel-eng)": 0.85, - "Tatoeba (tgl-eng)": 2.61, - "Tatoeba (tha-eng)": 0.69, - "Tatoeba (tuk-eng)": 5.76, - "Tatoeba (tur-eng)": 5.24, - "Tatoeba (tzl-eng)": 15.51, - "Tatoeba (uig-eng)": 0.6, - "Tatoeba (ukr-eng)": 1.23, - "Tatoeba (urd-eng)": 0.4, - "Tatoeba (uzb-eng)": 4.73, - "Tatoeba (vie-eng)": 6.55, - "Tatoeba (war-eng)": 4.12, - "Tatoeba (wuu-eng)": 0.2, - "Tatoeba (xho-eng)": 4.33, - "Tatoeba (yid-eng)": 0.59, - "Tatoeba (yue-eng)": 0.5, - "Tatoeba (zsm-eng)": 7.27 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "komninos", - "AmazonCounterfactualClassification (en)": 60.54, - "AmazonPolarityClassification": 59.59, - "AmazonReviewsClassification (en)": 31.01, - "Banking77Classification": 67.05, - "EmotionClassification": 33.18, - "ImdbClassification": 63.98, - "MTOPDomainClassification (en)": 78.57, - "MTOPIntentClassification (en)": 57.07, - "MassiveIntentClassification (en)": 57.21, - "MassiveScenarioClassification (en)": 66.11, - "ToxicConversationsClassification": 67.76, - "TweetSentimentExtractionClassification": 49.68 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "komninos", - "ArxivClusteringP2P": 34.73, - "ArxivClusteringS2S": 26.01, - "BiorxivClusteringP2P": 29.76, - "BiorxivClusteringS2S": 20.71, - "BlurbsClusteringP2P": 11.37, - "BlurbsClusteringS2S": 8.01, - "MedrxivClusteringP2P": 26.65, - "MedrxivClusteringS2S": 21.5, - "RedditClustering": 28.84, - "RedditClusteringP2P": 7.37, - "StackExchangeClustering": 39.04, - "StackExchangeClusteringP2P": 30.23, - "TenKGnadClusteringP2P": 15.89, - "TenKGnadClusteringS2S": 4.84, - "TwentyNewsgroupsClustering": 27.42 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "komninos", - "SprintDuplicateQuestions": 85.55, - "TwitterSemEval2015": 53.85, - "TwitterURLCorpus": 79.41 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "komninos", - "AskUbuntuDupQuestions": 50.88, - "MindSmallReranking": 28.92, - "SciDocsRR": 63.55, - "StackOverflowDupQuestions": 35.65 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "komninos", - "ArguAna": 30.96, - "CQADupstackRetrieval": 16.79, - "ClimateFEVER": 14.87, - "DBPedia": 15.88, - "FEVER": 15.56, - "FiQA2018": 10.49, - "HotpotQA": 20.77, - "MSMARCO": 9.75, - "NFCorpus": 11.79, - "NQ": 12.75, - "QuoraRetrieval": 71.57, - "SCIDOCS": 8.47, - "SciFact": 29.53, - "TRECCOVID": 35.92, - "Touche2020": 13.17 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "komninos", - "BIOSSES": 50.25, - "SICK-R": 55.49, - "STS12": 53.51, - "STS13": 70.8, - "STS14": 63.56, - "STS15": 74.08, - "STS16": 64.6, - "STS17 (ar-ar)": 13.78, - "STS17 (en-ar)": 9.08, - "STS17 (en-de)": -3.11, - "STS17 (en-en)": 76.91, - "STS17 (en-tr)": -0.45, - "STS17 (es-en)": -8.18, - "STS17 (es-es)": 48.23, - "STS17 (fr-en)": 5.81, - "STS17 (it-en)": 3.64, - "STS17 (ko-ko)": 2.54, - "STS17 (nl-en)": 0.44, - "STS22 (ar)": 32.42, - "STS22 (de)": 33.04, - "STS22 (de-en)": 28.65, - "STS22 (de-fr)": 14.77, - "STS22 (de-pl)": 11.21, - "STS22 (en)": 53.89, - "STS22 (es)": 48.53, - "STS22 (es-en)": 26.97, - "STS22 (es-it)": 41.1, - "STS22 (fr)": 49.43, - "STS22 (fr-pl)": 39.44, - "STS22 (it)": 57.77, 
- "STS22 (pl)": 12.47, - "STS22 (pl-en)": 45.55, - "STS22 (ru)": 19.44, - "STS22 (tr)": 47.38, - "STS22 (zh)": 4.78, - "STS22 (zh-en)": 14.05, - "STSBenchmark": 61.55 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "komninos", - "SummEval": 30.49 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "komninos" - } - ] - } - }, - "text-search-ada-001": { - "BitextMining": { - "f1": [ - { - "Model": "text-search-ada-001" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-search-ada-001" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-search-ada-001", - "BiorxivClusteringS2S": 26.05, - "MedrxivClusteringS2S": 25.67, - "TwentyNewsgroupsClustering": 44.92 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-search-ada-001" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-search-ada-001" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-search-ada-001", - "ArguAna": 46.91, - "ClimateFEVER": 18.5, - "DBPedia": 36.2, - "FEVER": 72.1, - "FiQA2018": 38.41, - "HotpotQA": 59.39, - "MSMARCO": 37.94, - "NFCorpus": 33.17, - "NQ": 42.81, - "QuoraRetrieval": 70.57, - "SCIDOCS": 14.83, - "SciFact": 67.25, - "TRECCOVID": 72.43, - "Touche2020": 28.68 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-search-ada-001" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-search-ada-001" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-search-ada-001" - } - ] - } - }, - "text-embedding-3-small-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-embedding-3-small-instruct", - "ARCChallenge": 13.76, - "AlphaNLI": 21.14, - "HellaSwag": 27.2, - "PIQA": 29.59, - "Quail": 6.64, - "RARbCode": 72.14, - "RARbMath": 64.31, - "SIQA": 2.98, - "SpartQA": 3.58, - "TempReasonL1": 2.29, - "TempReasonL2Fact": 26.34, - "TempReasonL2Pure": 3.17, - "TempReasonL3Fact": 22.72, - "TempReasonL3Pure": 9.98, - "WinoGrande": 25.49 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-embedding-3-small-instruct" - } - ] - } - }, - "gte-Qwen1.5-7B-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "gte-Qwen1.5-7B-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "AmazonCounterfactualClassification (en)": 83.16, - "AmazonPolarityClassification": 96.7, - "AmazonReviewsClassification (en)": 62.17, - "AmazonReviewsClassification (zh)": 52.95, - "Banking77Classification": 81.68, - "EmotionClassification": 54.53, - "IFlyTek": 53.77, - "ImdbClassification": 95.58, - "JDReview": 88.2, - "MTOPDomainClassification (en)": 95.75, - "MTOPIntentClassification (en)": 84.26, - "MassiveIntentClassification (zh-CN)": 76.25, - "MassiveIntentClassification (en)": 78.47, - "MassiveScenarioClassification (en)": 78.19, - 
"MassiveScenarioClassification (zh-CN)": 77.26, - "MultilingualSentiment": 77.42, - "OnlineShopping": 94.48, - "TNews": 51.24, - "ToxicConversationsClassification": 78.75, - "TweetSentimentExtractionClassification": 66.0, - "Waimai": 88.63 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "ArxivClusteringP2P": 56.4, - "ArxivClusteringS2S": 51.45, - "BiorxivClusteringP2P": 49.01, - "BiorxivClusteringS2S": 45.06, - "CLSClusteringP2P": 47.21, - "CLSClusteringS2S": 45.79, - "MedrxivClusteringP2P": 44.37, - "MedrxivClusteringS2S": 42.0, - "RedditClustering": 73.37, - "RedditClusteringP2P": 72.51, - "StackExchangeClustering": 79.07, - "StackExchangeClusteringP2P": 49.57, - "ThuNewsClusteringP2P": 87.43, - "ThuNewsClusteringS2S": 87.9, - "TwentyNewsgroupsClustering": 51.31 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "Cmnli": 91.81, - "Ocnli": 85.22, - "SprintDuplicateQuestions": 95.99, - "TwitterSemEval2015": 79.36, - "TwitterURLCorpus": 86.79 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "AskUbuntuDupQuestions": 66.0, - "CMedQAv1": 86.37, - "CMedQAv2": 87.41, - "MindSmallReranking": 32.71, - "SciDocsRR": 87.89, - "StackOverflowDupQuestions": 53.93, - "T2Reranking": 68.11 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "ArguAna": 62.65, - "BrightRetrieval (stackoverflow)": 19.85, - "BrightRetrieval (earth_science)": 36.22, - "BrightRetrieval (leetcode)": 25.46, - "BrightRetrieval (theoremqa_questions)": 26.97, - "BrightRetrieval (economics)": 17.72, - "BrightRetrieval (robotics)": 13.47, - "BrightRetrieval (pony)": 9.79, - "BrightRetrieval (aops)": 14.36, - "BrightRetrieval (psychology)": 24.61, - "BrightRetrieval (theoremqa_theorems)": 26.66, - "BrightRetrieval (biology)": 30.92, - "BrightRetrieval (sustainable_living)": 14.93, - "CQADupstackRetrieval": 40.64, - "ClimateFEVER": 44.0, - "CmedqaRetrieval": 43.47, - "CovidRetrieval": 80.87, - "DBPedia": 48.04, - "DuRetrieval": 86.01, - "EcomRetrieval": 66.46, - "FEVER": 93.35, - "FiQA2018": 55.31, - "HotpotQA": 72.25, - "MMarcoRetrieval": 73.83, - "MSMARCO": 41.68, - "MedicalRetrieval": 61.33, - "NFCorpus": 38.25, - "NQ": 61.79, - "QuoraRetrieval": 89.61, - "SCIDOCS": 27.69, - "SciFact": 75.31, - "T2Retrieval": 83.58, - "TRECCOVID": 72.72, - "Touche2020": 20.3, - "VideoRetrieval": 69.41 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "AFQMC": 58.47, - "ATEC": 55.46, - "BIOSSES": 81.12, - "BQ": 77.59, - "LCQMC": 76.29, - "PAWSX": 50.22, - "QBQTC": 31.82, - "SICK-R": 79.15, - "STS12": 76.52, - "STS13": 88.63, - "STS14": 83.32, - "STS15": 87.5, - "STS16": 86.39, - "STS17 (en-en)": 87.79, - "STS22 (en)": 66.4, - "STS22 (zh)": 67.36, - "STSB": 81.37, - "STSBenchmark": 87.35 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "gte-Qwen1.5-7B-instruct", - "SummEval": 31.46 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "gte-Qwen1.5-7B-instruct" - } - ] - } - }, - "bge-large-en-v1.5-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": 
"bge-large-en-v1.5-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-large-en-v1.5-instruct", - "ARCChallenge": 8.86, - "AlphaNLI": 0.86, - "HellaSwag": 26.24, - "PIQA": 23.26, - "Quail": 2.72, - "RARbCode": 45.25, - "RARbMath": 49.82, - "SIQA": 0.59, - "SpartQA": 2.34, - "TempReasonL1": 1.17, - "TempReasonL2Fact": 21.19, - "TempReasonL2Pure": 2.1, - "TempReasonL3Fact": 17.59, - "TempReasonL3Pure": 5.99, - "WinoGrande": 10.31 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-large-en-v1.5-instruct" - } - ] - } - }, - "text-embedding-3-large-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-embedding-3-large-instruct", - "ARCChallenge": 21.22, - "AlphaNLI": 34.23, - "HellaSwag": 31.4, - "PIQA": 37.52, - "Quail": 13.6, - "RARbCode": 89.41, - "RARbMath": 87.73, - "SIQA": 4.99, - "SpartQA": 7.45, - "TempReasonL1": 2.07, - "TempReasonL2Fact": 39.77, - "TempReasonL2Pure": 11.04, - "TempReasonL3Fact": 37.04, - "TempReasonL3Pure": 15.51, - "WinoGrande": 33.92 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-embedding-3-large-instruct" - } - ] - } - }, - "e5-mistral-7b-instruct-noinstruct": { - "BitextMining": { - "f1": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct", - "ARCChallenge": 20.48, - "AlphaNLI": 18.88, - "HellaSwag": 32.25, - "PIQA": 32.8, - "Quail": 6.25, - "RARbCode": 79.84, - "RARbMath": 76.19, - "SIQA": 5.08, - "SpartQA": 10.87, - "TempReasonL1": 3.04, - "TempReasonL2Fact": 35.63, - "TempReasonL2Pure": 9.32, - "TempReasonL3Fact": 30.41, - "TempReasonL3Pure": 14.39, - "WinoGrande": 45.18 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "e5-mistral-7b-instruct-noinstruct" - } - ] - } - }, - "bge-large-zh-noinstruct": { - "BitextMining": { - "f1": [ - { - "Model": "bge-large-zh-noinstruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bge-large-zh-noinstruct", - "AmazonReviewsClassification (zh)": 41.94, - "IFlyTek": 45.32, - "JDReview": 85.38, - 
"MassiveIntentClassification (zh-CN)": 66.96, - "MassiveScenarioClassification (zh-CN)": 73.39, - "MultilingualSentiment": 73.7, - "OnlineShopping": 91.66, - "TNews": 52.05, - "Waimai": 86.83 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bge-large-zh-noinstruct", - "CLSClusteringP2P": 41.23, - "CLSClusteringS2S": 40.04, - "ThuNewsClusteringP2P": 62.03, - "ThuNewsClusteringS2S": 56.75 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bge-large-zh-noinstruct", - "Cmnli": 82.17, - "Ocnli": 71.37 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bge-large-zh-noinstruct", - "CMedQAv1": 81.72, - "CMedQAv2": 84.64, - "MMarcoReranking": 27.1, - "T2Reranking": 66.16 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bge-large-zh-noinstruct", - "CmedqaRetrieval": 41.03, - "CovidRetrieval": 75.07, - "DuRetrieval": 84.68, - "EcomRetrieval": 65.6, - "MMarcoRetrieval": 81.38, - "MedicalRetrieval": 58.28, - "T2Retrieval": 84.39, - "VideoRetrieval": 73.93 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bge-large-zh-noinstruct", - "AFQMC": 43.06, - "ATEC": 48.29, - "BQ": 60.53, - "LCQMC": 74.71, - "PAWSX": 16.64, - "QBQTC": 35.2, - "STS22 (zh)": 67.19, - "STSB": 78.41 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bge-large-zh-noinstruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bge-large-zh-noinstruct" - } - ] - } - }, - "e5-large-v2": { - "BitextMining": { - "f1": [ - { - "Model": "e5-large-v2" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "e5-large-v2" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "e5-large-v2", - "BiorxivClusteringP2P": 36.72, - "BiorxivClusteringS2S": 35.47, - "MedrxivClusteringP2P": 31.45, - "MedrxivClusteringS2S": 29.91, - "RedditClustering": 55.5, - "RedditClusteringP2P": 63.71, - "StackExchangeClustering": 65.23, - "StackExchangeClusteringP2P": 33.62, - "TwentyNewsgroupsClustering": 48.73 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "e5-large-v2" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "e5-large-v2" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "e5-large-v2" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "e5-large-v2" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "e5-large-v2" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "e5-large-v2", - "Core17InstructionRetrieval": 0.12, - "News21InstructionRetrieval": 0.87, - "Robust04InstructionRetrieval": -4.16 - } - ] - } - }, - "voyage-multilingual-2": { - "BitextMining": { - "f1": [ - { - "Model": "voyage-multilingual-2" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "voyage-multilingual-2", - "AmazonReviewsClassification (fr)": 43.36, - "MTOPDomainClassification (fr)": 90.33, - "MTOPIntentClassification (fr)": 60.52, - "MasakhaNEWSClassification (fra)": 74.81, - "MassiveIntentClassification (fr)": 68.06, - "MassiveScenarioClassification (fr)": 74.29 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "voyage-multilingual-2", - "AlloProfClusteringP2P": 65.37, - "AlloProfClusteringS2S": 47.03, - "HALClusteringS2S": 27.67, - "MLSUMClusteringP2P (fr)": 45.99, - "MLSUMClusteringS2S (fr)": 45.57, - "MasakhaNEWSClusteringP2P (fra)": 44.53, - "MasakhaNEWSClusteringS2S (fra)": 49.8 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "voyage-multilingual-2", - "OpusparcusPC (fr)": 93.68, - "PawsXPairClassification (fr)": 63.64 - } - ] - }, - "Reranking": { - 
"map": [ - { - "Model": "voyage-multilingual-2", - "AlloprofReranking": 74.78, - "SyntecReranking": 90.4 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "voyage-multilingual-2", - "AlloprofRetrieval": 58.27, - "BSARDRetrieval": 5.14, - "LEMBNarrativeQARetrieval": 64.69, - "LEMBNeedleRetrieval": 75.25, - "LEMBPasskeyRetrieval": 97.0, - "LEMBQMSumRetrieval": 51.49, - "LEMBSummScreenFDRetrieval": 99.11, - "LEMBWikimQARetrieval": 87.49, - "MintakaRetrieval (fr)": 49.19, - "SyntecRetrieval": 87.28, - "XPQARetrieval (fr)": 72.92 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "voyage-multilingual-2", - "SICKFr": 74.9, - "STS22 (fr)": 82.76, - "STSBenchmarkMultilingualSTS (fr)": 82.72 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "voyage-multilingual-2", - "SummEvalFr": 29.96 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "voyage-multilingual-2" - } - ] - } - }, - "all-MiniLM-L6-v2-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "all-MiniLM-L6-v2-instruct", - "ARCChallenge": 9.4, - "AlphaNLI": 15.09, - "HellaSwag": 20.51, - "PIQA": 24.68, - "Quail": 3.46, - "RARbCode": 42.47, - "RARbMath": 62.39, - "SIQA": 1.53, - "SpartQA": 0.57, - "TempReasonL1": 1.05, - "TempReasonL2Fact": 16.57, - "TempReasonL2Pure": 0.49, - "TempReasonL3Fact": 14.01, - "TempReasonL3Pure": 6.27, - "WinoGrande": 20.73 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "all-MiniLM-L6-v2-instruct" - } - ] - } - }, - "m3e-base": { - "BitextMining": { - "f1": [ - { - "Model": "m3e-base" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "m3e-base", - "AmazonReviewsClassification (zh)": 43.02, - "IFlyTek": 44.42, - "JDReview": 85.33, - "MassiveIntentClassification (zh-CN)": 68.4, - "MassiveScenarioClassification (zh-CN)": 74.6, - "MultilingualSentiment": 71.9, - "OnlineShopping": 87.77, - "TNews": 48.28, - "Waimai": 83.99 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "m3e-base", - "CLSClusteringP2P": 39.81, - "CLSClusteringS2S": 37.34, - "ThuNewsClusteringP2P": 59.77, - "ThuNewsClusteringS2S": 53.78 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "m3e-base", - "Cmnli": 69.98, - "Ocnli": 58.0 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "m3e-base", - "CMedQAv1": 77.05, - "CMedQAv2": 76.76, - "MMarcoReranking": 17.51, - "T2Reranking": 66.03 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "m3e-base", - "CmedqaRetrieval": 30.33, - "CovidRetrieval": 66.42, - "DuRetrieval": 75.76, - "EcomRetrieval": 50.27, - "MMarcoRetrieval": 65.46, - "MedicalRetrieval": 42.79, - "T2Retrieval": 73.14, - "VideoRetrieval": 51.11 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "m3e-base", - "AFQMC": 35.87, - "ATEC": 41.27, - "BQ": 63.81, - "LCQMC": 74.88, - "PAWSX": 12.19, - "QBQTC": 32.07, - "STS22 (zh)": 66.73, - "STSB": 76.97 - } - ] - }, - "Summarization": { - 
"spearman": [ - { - "Model": "m3e-base" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "m3e-base" - } - ] - } - }, - "text-search-davinci-001": { - "BitextMining": { - "f1": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-search-davinci-001", - "ArguAna": 43.5, - "ClimateFEVER": 22.3, - "FEVER": 77.5, - "FiQA2018": 51.2, - "HotpotQA": 68.8, - "NFCorpus": 40.7, - "QuoraRetrieval": 63.8, - "SciFact": 75.4, - "TRECCOVID": 64.9, - "Touche2020": 29.1 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-search-davinci-001" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-search-davinci-001" - } - ] - } - }, - "nomic-embed-text-v1.5-256": { - "BitextMining": { - "f1": [ - { - "Model": "nomic-embed-text-v1.5-256" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "nomic-embed-text-v1.5-256", - "AmazonCounterfactualClassification (en)": 72.94, - "AmazonPolarityClassification": 91.35, - "AmazonReviewsClassification (en)": 45.73, - "Banking77Classification": 83.69, - "EmotionClassification": 45.88, - "ImdbClassification": 83.99, - "MTOPDomainClassification (en)": 91.68, - "MTOPIntentClassification (en)": 72.47, - "MassiveIntentClassification (en)": 71.76, - "MassiveScenarioClassification (en)": 75.67, - "ToxicConversationsClassification": 70.87, - "TweetSentimentExtractionClassification": 59.2 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "nomic-embed-text-v1.5-256", - "ArxivClusteringP2P": 44.82, - "ArxivClusteringS2S": 35.32, - "BiorxivClusteringP2P": 38.19, - "BiorxivClusteringS2S": 31.83, - "MedrxivClusteringP2P": 34.08, - "MedrxivClusteringS2S": 30.98, - "RedditClustering": 54.92, - "RedditClusteringP2P": 60.23, - "StackExchangeClustering": 61.81, - "StackExchangeClusteringP2P": 34.03, - "TwentyNewsgroupsClustering": 48.56 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "nomic-embed-text-v1.5-256", - "SprintDuplicateQuestions": 92.31, - "TwitterSemEval2015": 73.61, - "TwitterURLCorpus": 86.34 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "nomic-embed-text-v1.5-256", - "AskUbuntuDupQuestions": 61.34, - "MindSmallReranking": 30.04, - "SciDocsRR": 79.4, - "StackOverflowDupQuestions": 49.95 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "nomic-embed-text-v1.5-256", - "ArguAna": 45.44, - "CQADupstackRetrieval": 37.61, - "ClimateFEVER": 39.63, - "DBPedia": 39.42, - "FEVER": 84.4, - "FiQA2018": 35.0, - "HotpotQA": 67.78, - "MSMARCO": 41.38, - "NFCorpus": 32.54, - "NQ": 57.1, - "QuoraRetrieval": 87.65, - "SCIDOCS": 16.76, - "SciFact": 68.24, - "TRECCOVID": 80.65, - "Touche2020": 28.49 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "nomic-embed-text-v1.5-256", - "BIOSSES": 81.58, - "SICK-R": 79.24, - "STS12": 78.16, - "STS13": 86.01, - "STS14": 81.25, - "STS15": 86.51, - "STS16": 84.24, - "STS17 (en-en)": 86.44, - "STS22 (en)": 65.14, - "STSBenchmark": 84.8 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "nomic-embed-text-v1.5-256", - "SummEval": 30.05 - 
} - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "nomic-embed-text-v1.5-256" - } - ] - } - }, - "sbert_large_nlu_ru": { - "BitextMining": { - "f1": [ - { - "Model": "sbert_large_nlu_ru" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "sbert_large_nlu_ru", - "GeoreviewClassification (rus-Cyrl)": 39.97, - "HeadlineClassification (rus-Cyrl)": 79.26, - "InappropriatenessClassification (rus-Cyrl)": 62.52, - "KinopoiskClassification (rus-Cyrl)": 49.51, - "MassiveIntentClassification (rus-Cyrl)": 61.09, - "MassiveScenarioClassification (rus-Cyrl)": 67.6, - "RuReviewsClassification (rus-Cyrl)": 58.27, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.9, - "RuSciBenchOECDClassification (rus-Cyrl)": 43.04 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "sbert_large_nlu_ru", - "GeoreviewClusteringP2P (rus-Cyrl)": 59.02, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.4, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 46.41 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "sbert_large_nlu_ru", - "TERRa (rus-Cyrl)": 50.17 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "sbert_large_nlu_ru", - "RuBQReranking (rus-Cyrl)": 46.81 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "sbert_large_nlu_ru", - "RiaNewsRetrieval (rus-Cyrl)": 11.11, - "RuBQRetrieval (rus-Cyrl)": 12.45 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "sbert_large_nlu_ru", - "RUParaPhraserSTS (rus-Cyrl)": 62.06, - "RuSTSBenchmarkSTS (rus-Cyrl)": 58.82, - "STS22 (rus-Cyrl)": 50.75 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "sbert_large_nlu_ru" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "sbert_large_nlu_ru" - } - ] - } - }, - "rubert-tiny": { - "BitextMining": { - "f1": [ - { - "Model": "rubert-tiny" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "rubert-tiny", - "GeoreviewClassification (rus-Cyrl)": 33.45, - "HeadlineClassification (rus-Cyrl)": 57.65, - "InappropriatenessClassification (rus-Cyrl)": 54.5, - "KinopoiskClassification (rus-Cyrl)": 41.36, - "MassiveIntentClassification (rus-Cyrl)": 50.1, - "MassiveScenarioClassification (rus-Cyrl)": 52.15, - "RuReviewsClassification (rus-Cyrl)": 49.56, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 35.71, - "RuSciBenchOECDClassification (rus-Cyrl)": 26.51 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "rubert-tiny", - "GeoreviewClusteringP2P (rus-Cyrl)": 34.4, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 29.89, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 27.98 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "rubert-tiny", - "TERRa (rus-Cyrl)": 51.06 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "rubert-tiny", - "RuBQReranking (rus-Cyrl)": 35.44 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "rubert-tiny", - "RiaNewsRetrieval (rus-Cyrl)": 0.79, - "RuBQRetrieval (rus-Cyrl)": 3.24 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "rubert-tiny", - "RUParaPhraserSTS (rus-Cyrl)": 53.41, - "RuSTSBenchmarkSTS (rus-Cyrl)": 58.16, - "STS22 (rus-Cyrl)": 47.88 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "rubert-tiny" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "rubert-tiny" - } - ] - } - }, - "google-gecko-256.text-embedding-preview-0409": { - "BitextMining": { - "f1": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", 
- "AmazonCounterfactualClassification (en)": 70.93, - "AmazonPolarityClassification": 97.34, - "AmazonReviewsClassification (en)": 48.47, - "Banking77Classification": 86.01, - "EmotionClassification": 51.53, - "ImdbClassification": 95.7, - "MTOPDomainClassification (en)": 98.02, - "MTOPIntentClassification (en)": 77.82, - "MassiveIntentClassification (en)": 75.67, - "MassiveScenarioClassification (en)": 85.16, - "ToxicConversationsClassification": 88.33, - "TweetSentimentExtractionClassification": 72.97 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", - "ArxivClusteringP2P": 44.12, - "ArxivClusteringS2S": 36.54, - "BiorxivClusteringP2P": 36.28, - "BiorxivClusteringS2S": 33.09, - "MedrxivClusteringP2P": 32.08, - "MedrxivClusteringS2S": 30.84, - "RedditClustering": 62.24, - "RedditClusteringP2P": 63.7, - "StackExchangeClustering": 70.19, - "StackExchangeClusteringP2P": 36.1, - "TwentyNewsgroupsClustering": 50.6 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", - "SprintDuplicateQuestions": 96.49, - "TwitterSemEval2015": 78.23, - "TwitterURLCorpus": 87.04 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", - "AskUbuntuDupQuestions": 63.84, - "MindSmallReranking": 31.89, - "SciDocsRR": 81.62, - "StackOverflowDupQuestions": 53.76 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", - "ArguAna": 56.27, - "CQADupstackRetrieval": 45.41, - "ClimateFEVER": 29.35, - "DBPedia": 41.91, - "FEVER": 82.61, - "FiQA2018": 55.54, - "HotpotQA": 64.65, - "MSMARCO": 31.12, - "NFCorpus": 37.81, - "NQ": 57.37, - "QuoraRetrieval": 87.89, - "SCIDOCS": 18.21, - "SciFact": 70.86, - "TRECCOVID": 80.13, - "Touche2020": 27.4 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", - "BIOSSES": 89.42, - "SICK-R": 81.67, - "STS12": 78.02, - "STS13": 90.1, - "STS14": 85.44, - "STS15": 89.64, - "STS16": 87.24, - "STS17 (en-en)": 90.46, - "STS22 (en)": 67.99, - "STSBenchmark": 89.33 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409", - "SummEval": 32.36 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "google-gecko-256.text-embedding-preview-0409" - } - ] - } - }, - "FollowIR-7B": { - "BitextMining": { - "f1": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "FollowIR-7B" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "FollowIR-7B", - "Core17InstructionRetrieval": 16.48, - "News21InstructionRetrieval": 6.26, - "Robust04InstructionRetrieval": 13.72 - } - ] - } - }, - "bert-base-uncased": { - "BitextMining": { - "f1": [ - { - "Model": "bert-base-uncased" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "bert-base-uncased", - "AmazonCounterfactualClassification (en)": 74.25, - "AmazonPolarityClassification": 71.33, - "AmazonReviewsClassification 
(en)": 33.56, - "Banking77Classification": 63.41, - "EmotionClassification": 35.28, - "ImdbClassification": 65.35, - "MTOPDomainClassification (en)": 82.63, - "MTOPIntentClassification (en)": 68.14, - "MassiveIntentClassification (en)": 59.88, - "MassiveScenarioClassification (en)": 64.28, - "ToxicConversationsClassification": 70.0, - "TweetSentimentExtractionClassification": 51.81 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "bert-base-uncased", - "ArxivClusteringP2P": 35.19, - "ArxivClusteringS2S": 27.51, - "BiorxivClusteringP2P": 30.12, - "BiorxivClusteringS2S": 24.77, - "MedrxivClusteringP2P": 26.09, - "MedrxivClusteringS2S": 23.6, - "RedditClustering": 27.24, - "RedditClusteringP2P": 43.32, - "StackExchangeClustering": 43.58, - "StackExchangeClusteringP2P": 26.55, - "TwentyNewsgroupsClustering": 23.35 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "bert-base-uncased", - "SprintDuplicateQuestions": 36.81, - "TwitterSemEval2015": 55.9, - "TwitterURLCorpus": 76.29 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "bert-base-uncased", - "AskUbuntuDupQuestions": 45.84, - "MindSmallReranking": 28.37, - "SciDocsRR": 64.94, - "StackOverflowDupQuestions": 34.62 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "bert-base-uncased", - "ArguAna": 28.29, - "CQADupstackRetrieval": 5.51, - "ClimateFEVER": 5.41, - "DBPedia": 4.13, - "FEVER": 3.3, - "FiQA2018": 2.19, - "HotpotQA": 8.26, - "MSMARCO": 1.91, - "NFCorpus": 4.3, - "NQ": 2.62, - "QuoraRetrieval": 61.03, - "SCIDOCS": 2.82, - "SciFact": 13.34, - "TRECCOVID": 14.74, - "Touche2020": 0.97 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "bert-base-uncased", - "BIOSSES": 54.7, - "SICK-R": 58.65, - "STS12": 30.87, - "STS13": 59.89, - "STS14": 47.73, - "STS15": 60.29, - "STS16": 63.73, - "STS17 (en-en)": 64.1, - "STS22 (en)": 56.37, - "STSBenchmark": 47.29 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "bert-base-uncased", - "SummEval": 29.82 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "bert-base-uncased" - } - ] - } - }, - "xlm-roberta-base": { - "BitextMining": { - "f1": [ - { - "Model": "xlm-roberta-base", - "BornholmBitextMining": 4.42 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "xlm-roberta-base", - "AmazonReviewsClassification (fr)": 26.75, - "AngryTweetsClassification": 52.41, - "DKHateClassification": 56.78, - "DanishPoliticalCommentsClassification": 34.03, - "LccSentimentClassification": 52.27, - "MTOPDomainClassification (fr)": 43.83, - "MTOPIntentClassification (fr)": 19.38, - "MasakhaNEWSClassification (fra)": 60.5, - "MassiveIntentClassification (da)": 41.06, - "MassiveIntentClassification (nb)": 40.46, - "MassiveIntentClassification (sv)": 45.12, - "MassiveIntentClassification (fr)": 13.58, - "MassiveScenarioClassification (da)": 43.91, - "MassiveScenarioClassification (nb)": 44.83, - "MassiveScenarioClassification (sv)": 47.35, - "MassiveScenarioClassification (fr)": 23.21, - "NoRecClassification": 46.28, - "NordicLangClassification": 79.39, - "NorwegianParliament": 56.75, - "ScalaDaClassification": 57.3, - "ScalaNbClassification": 58.33 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "xlm-roberta-base", - "AlloProfClusteringP2P": 52.24, - "AlloProfClusteringS2S": 20.37, - "HALClusteringS2S": 8.68, - "MLSUMClusteringP2P": 40.44, - "MLSUMClusteringS2S": 24.14, - "MasakhaNEWSClusteringP2P (fra)": 29.29, - "MasakhaNEWSClusteringS2S (fra)": 23.76 - } - ] - }, - "PairClassification": { - "ap": [ - { - 
"Model": "xlm-roberta-base", - "OpusparcusPC (fr)": 85.45, - "PawsXPairClassification (fr)": 51.35 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "xlm-roberta-base", - "AlloprofReranking": 25.58, - "SyntecReranking": 43.75 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "xlm-roberta-base", - "AlloprofRetrieval": 0.16, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 0.88, - "SyntecRetrieval": 3.33, - "XPQARetrieval (fr)": 11.65 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "xlm-roberta-base", - "SICKFr": 48.62, - "STS22 (fr)": 56.72, - "STSBenchmarkMultilingualSTS (fr)": 46.23 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "xlm-roberta-base", - "SummEvalFr": 29.14 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "xlm-roberta-base" - } - ] - } - }, - "text-embedding-3-large-256": { - "BitextMining": { - "f1": [ - { - "Model": "text-embedding-3-large-256" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-embedding-3-large-256", - "AmazonCounterfactualClassification (en)": 73.96, - "AmazonPolarityClassification": 91.32, - "AmazonReviewsClassification (en)": 46.03, - "Banking77Classification": 83.19, - "EmotionClassification": 45.8, - "ImdbClassification": 85.93, - "MTOPDomainClassification (en)": 92.76, - "MTOPIntentClassification (en)": 70.45, - "MassiveIntentClassification (en)": 71.12, - "MassiveScenarioClassification (en)": 75.56, - "ToxicConversationsClassification": 68.52, - "TweetSentimentExtractionClassification": 58.98 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-embedding-3-large-256", - "ArxivClusteringP2P": 47.05, - "ArxivClusteringS2S": 42.59, - "BiorxivClusteringP2P": 35.43, - "BiorxivClusteringS2S": 33.86, - "MedrxivClusteringP2P": 32.1, - "MedrxivClusteringS2S": 31.15, - "RedditClustering": 60.18, - "RedditClusteringP2P": 64.71, - "StackExchangeClustering": 71.23, - "StackExchangeClusteringP2P": 35.95, - "TwentyNewsgroupsClustering": 54.24 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-embedding-3-large-256", - "SprintDuplicateQuestions": 89.02, - "TwitterSemEval2015": 76.56, - "TwitterURLCorpus": 87.09 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-embedding-3-large-256", - "AskUbuntuDupQuestions": 64.61, - "MindSmallReranking": 29.63, - "SciDocsRR": 84.25, - "StackOverflowDupQuestions": 53.46 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-embedding-3-large-256", - "ArguAna": 55.6, - "CQADupstackRetrieval": 42.28, - "ClimateFEVER": 25.8, - "DBPedia": 40.8, - "FEVER": 84.57, - "FiQA2018": 50.33, - "HotpotQA": 62.69, - "MSMARCO": 37.93, - "NFCorpus": 37.94, - "NQ": 56.64, - "QuoraRetrieval": 88.22, - "SCIDOCS": 20.44, - "SciFact": 73.1, - "TRECCOVID": 76.24, - "Touche2020": 22.31 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-embedding-3-large-256", - "BIOSSES": 84.87, - "SICK-R": 79.18, - "STS12": 71.98, - "STS13": 85.52, - "STS14": 80.5, - "STS15": 87.51, - "STS16": 84.48, - "STS17 (en-en)": 88.11, - "STS22 (en)": 65.92, - "STSBenchmark": 82.34 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-embedding-3-large-256", - "SummEval": 29.92 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-embedding-3-large-256" - } - ] - } - }, - "LaBSE-en-ru": { - "BitextMining": { - "f1": [ - { - "Model": "LaBSE-en-ru", - "Tatoeba (rus-Cyrl_eng-Latn)": 93.62 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "LaBSE-en-ru", - 
"GeoreviewClassification (rus-Cyrl)": 40.89, - "HeadlineClassification (rus-Cyrl)": 68.75, - "InappropriatenessClassification (rus-Cyrl)": 58.48, - "KinopoiskClassification (rus-Cyrl)": 49.85, - "MassiveIntentClassification (swa-Latn)": 19.98, - "MassiveIntentClassification (aze-Latn)": 19.52, - "MassiveIntentClassification (tur-Latn)": 24.12, - "MassiveIntentClassification (cmo-Hans)": 3.96, - "MassiveIntentClassification (amh-Ethi)": 2.76, - "MassiveIntentClassification (kan-Knda)": 2.86, - "MassiveIntentClassification (hin-Deva)": 3.29, - "MassiveIntentClassification (tgl-Latn)": 27.08, - "MassiveIntentClassification (tha-Thai)": 4.0, - "MassiveIntentClassification (swe-Latn)": 32.01, - "MassiveIntentClassification (deu-Latn)": 35.14, - "MassiveIntentClassification (spa-Latn)": 37.67, - "MassiveIntentClassification (por-Latn)": 39.84, - "MassiveIntentClassification (jpn-Jpan)": 4.78, - "MassiveIntentClassification (fin-Latn)": 31.11, - "MassiveIntentClassification (kat-Geor)": 2.87, - "MassiveIntentClassification (slv-Latn)": 35.66, - "MassiveIntentClassification (rus-Cyrl)": 60.53, - "MassiveIntentClassification (ita-Latn)": 43.32, - "MassiveIntentClassification (tel-Telu)": 2.72, - "MassiveIntentClassification (afr-Latn)": 30.59, - "MassiveIntentClassification (isl-Latn)": 25.61, - "MassiveIntentClassification (fas-Arab)": 3.71, - "MassiveIntentClassification (vie-Latn)": 23.0, - "MassiveIntentClassification (ben-Beng)": 3.35, - "MassiveIntentClassification (hye-Armn)": 2.8, - "MassiveIntentClassification (pol-Latn)": 31.3, - "MassiveIntentClassification (cym-Latn)": 26.59, - "MassiveIntentClassification (jav-Latn)": 26.84, - "MassiveIntentClassification (mon-Cyrl)": 35.97, - "MassiveIntentClassification (en)": 60.48, - "MassiveIntentClassification (msa-Latn)": 27.82, - "MassiveIntentClassification (nob-Latn)": 35.78, - "MassiveIntentClassification (heb-Hebr)": 2.33, - "MassiveIntentClassification (khm-Khmr)": 4.6, - "MassiveIntentClassification (nld-Latn)": 34.66, - "MassiveIntentClassification (ind-Latn)": 33.31, - "MassiveIntentClassification (mal-Mlym)": 2.63, - "MassiveIntentClassification (tam-Taml)": 2.22, - "MassiveIntentClassification (mya-Mymr)": 3.57, - "MassiveIntentClassification (urd-Arab)": 3.36, - "MassiveIntentClassification (dan-Latn)": 38.66, - "MassiveIntentClassification (cmo-Hant)": 5.29, - "MassiveIntentClassification (ron-Latn)": 37.45, - "MassiveIntentClassification (lav-Latn)": 23.92, - "MassiveIntentClassification (fra-Latn)": 40.29, - "MassiveIntentClassification (ell-Grek)": 11.14, - "MassiveIntentClassification (sqi-Latn)": 35.84, - "MassiveIntentClassification (hun-Latn)": 26.74, - "MassiveIntentClassification (kor-Kore)": 2.69, - "MassiveIntentClassification (ara-Arab)": 5.19, - "MassiveScenarioClassification (swa-Latn)": 25.61, - "MassiveScenarioClassification (aze-Latn)": 24.48, - "MassiveScenarioClassification (tur-Latn)": 31.38, - "MassiveScenarioClassification (cmo-Hans)": 9.98, - "MassiveScenarioClassification (amh-Ethi)": 7.59, - "MassiveScenarioClassification (kan-Knda)": 8.73, - "MassiveScenarioClassification (hin-Deva)": 8.77, - "MassiveScenarioClassification (tgl-Latn)": 35.12, - "MassiveScenarioClassification (tha-Thai)": 8.69, - "MassiveScenarioClassification (swe-Latn)": 35.83, - "MassiveScenarioClassification (deu-Latn)": 41.72, - "MassiveScenarioClassification (spa-Latn)": 43.33, - "MassiveScenarioClassification (por-Latn)": 44.62, - "MassiveScenarioClassification (jpn-Jpan)": 9.51, - "MassiveScenarioClassification (fin-Latn)": 33.79, - 
"MassiveScenarioClassification (kat-Geor)": 7.32, - "MassiveScenarioClassification (slv-Latn)": 37.6, - "MassiveScenarioClassification (rus-Cyrl)": 65.15, - "MassiveScenarioClassification (ita-Latn)": 47.28, - "MassiveScenarioClassification (tel-Telu)": 7.53, - "MassiveScenarioClassification (afr-Latn)": 37.27, - "MassiveScenarioClassification (isl-Latn)": 30.32, - "MassiveScenarioClassification (fas-Arab)": 6.83, - "MassiveScenarioClassification (vie-Latn)": 28.92, - "MassiveScenarioClassification (ben-Beng)": 8.57, - "MassiveScenarioClassification (hye-Armn)": 8.91, - "MassiveScenarioClassification (pol-Latn)": 33.75, - "MassiveScenarioClassification (cym-Latn)": 30.38, - "MassiveScenarioClassification (jav-Latn)": 33.94, - "MassiveScenarioClassification (mon-Cyrl)": 41.53, - "MassiveScenarioClassification (en)": 65.43, - "MassiveScenarioClassification (msa-Latn)": 36.28, - "MassiveScenarioClassification (nob-Latn)": 42.43, - "MassiveScenarioClassification (heb-Hebr)": 8.64, - "MassiveScenarioClassification (khm-Khmr)": 9.99, - "MassiveScenarioClassification (nld-Latn)": 41.47, - "MassiveScenarioClassification (ind-Latn)": 39.05, - "MassiveScenarioClassification (mal-Mlym)": 7.24, - "MassiveScenarioClassification (tam-Taml)": 7.71, - "MassiveScenarioClassification (mya-Mymr)": 9.94, - "MassiveScenarioClassification (urd-Arab)": 9.16, - "MassiveScenarioClassification (dan-Latn)": 44.69, - "MassiveScenarioClassification (cmo-Hant)": 10.48, - "MassiveScenarioClassification (ron-Latn)": 44.55, - "MassiveScenarioClassification (lav-Latn)": 26.26, - "MassiveScenarioClassification (fra-Latn)": 45.08, - "MassiveScenarioClassification (ell-Grek)": 19.46, - "MassiveScenarioClassification (sqi-Latn)": 40.9, - "MassiveScenarioClassification (hun-Latn)": 33.92, - "MassiveScenarioClassification (kor-Kore)": 7.37, - "MassiveScenarioClassification (ara-Arab)": 12.43, - "RuReviewsClassification (rus-Cyrl)": 58.01, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.8, - "RuSciBenchOECDClassification (rus-Cyrl)": 40.36 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "LaBSE-en-ru", - "GeoreviewClusteringP2P (rus-Cyrl)": 51.89, - "MLSUMClusteringP2P (rus-Cyrl)": 37.87, - "MLSUMClusteringS2S (rus-Cyrl)": 41.24, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.48, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.16 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LaBSE-en-ru", - "OpusparcusPC (rus-Cyrl)": 87.18, - "TERRa (rus-Cyrl)": 55.61 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "LaBSE-en-ru", - "RuBQReranking (rus-Cyrl)": 54.83 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LaBSE-en-ru", - "RiaNewsRetrieval (rus-Cyrl)": 34.73, - "RuBQRetrieval (rus-Cyrl)": 29.03 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LaBSE-en-ru", - "RUParaPhraserSTS (rus-Cyrl)": 65.87, - "RuSTSBenchmarkSTS (rus-Cyrl)": 73.32, - "STS22 (deu-Latn)": 38.9, - "STS22 (en)": 59.47, - "STS22 (pol-Latn_eng-Latn)": 58.73, - "STS22 (spa-Latn)": 60.85, - "STS22 (fra-Latn)": 74.98, - "STS22 (deu-Latn_eng-Latn)": 47.98, - "STS22 (deu-Latn_fra-Latn)": 59.4, - "STS22 (deu-Latn_pol-Latn)": 39.48, - "STS22 (pol-Latn)": 32.74, - "STS22 (tur-Latn)": 55.04, - "STS22 (spa-Latn_eng-Latn)": 70.8, - "STS22 (rus-Cyrl)": 58.53, - "STS22 (ita-Latn)": 68.58, - "STS22 (fra-Latn_pol-Latn)": 61.98, - "STS22 (spa-Latn_ita-Latn)": 66.83, - "STS22 (cmn-Hans_eng-Latn)": 24.98, - "STS22 (ara-Arab)": 31.85, - "STS22 (cmn-Hans)": 35.1, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.02 - } - ] - }, - 
"Summarization": { - "spearman": [ - { - "Model": "LaBSE-en-ru" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LaBSE-en-ru" - } - ] - } - }, - "gelectra-base": { - "BitextMining": { - "f1": [ - { - "Model": "gelectra-base" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gelectra-base" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gelectra-base", - "BlurbsClusteringP2P": 10.06, - "BlurbsClusteringS2S": 7.74, - "TenKGnadClusteringP2P": 9.02, - "TenKGnadClusteringS2S": 4.11 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "gelectra-base" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "gelectra-base" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "gelectra-base" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "gelectra-base" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "gelectra-base" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "gelectra-base" - } - ] - } - }, - "OpenSearch-text-hybrid": { - "BitextMining": { - "f1": [ - { - "Model": "OpenSearch-text-hybrid" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "OpenSearch-text-hybrid", - "AmazonReviewsClassification (zh)": 46.18, - "IFlyTek": 51.8, - "JDReview": 86.02, - "MassiveIntentClassification (zh-CN)": 73.85, - "MassiveScenarioClassification (zh-CN)": 77.13, - "MultilingualSentiment": 76.35, - "OnlineShopping": 93.2, - "TNews": 53.06, - "Waimai": 88.1 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "OpenSearch-text-hybrid", - "CLSClusteringP2P": 41.64, - "CLSClusteringS2S": 40.33, - "ThuNewsClusteringP2P": 69.28, - "ThuNewsClusteringS2S": 63.75 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "OpenSearch-text-hybrid", - "Cmnli": 90.77, - "Ocnli": 85.44 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "OpenSearch-text-hybrid", - "CMedQAv1": 88.99, - "CMedQAv2": 89.6, - "MMarcoReranking": 28.12, - "T2Reranking": 66.38 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "OpenSearch-text-hybrid", - "CmedqaRetrieval": 46.56, - "CovidRetrieval": 84.03, - "DuRetrieval": 87.85, - "EcomRetrieval": 68.79, - "MMarcoRetrieval": 79.93, - "MedicalRetrieval": 65.92, - "T2Retrieval": 86.76, - "VideoRetrieval": 75.43 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "OpenSearch-text-hybrid", - "AFQMC": 59.11, - "ATEC": 58.19, - "BQ": 71.07, - "LCQMC": 78.27, - "PAWSX": 44.98, - "QBQTC": 38.69, - "STS22 (zh)": 66.53, - "STSB": 82.8 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "OpenSearch-text-hybrid" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "OpenSearch-text-hybrid" - } - ] - } - }, - "gbert-large": { - "BitextMining": { - "f1": [ - { - "Model": "gbert-large" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "gbert-large" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "gbert-large", - "BlurbsClusteringP2P": 39.3, - "BlurbsClusteringS2S": 13.38, - "TenKGnadClusteringP2P": 41.69, - "TenKGnadClusteringS2S": 34.97 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "gbert-large" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "gbert-large" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "gbert-large" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "gbert-large" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "gbert-large" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "gbert-large" - } - 
] - } - }, - "msmarco-bert-co-condensor": { - "BitextMining": { - "f1": [ - { - "Model": "msmarco-bert-co-condensor" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "msmarco-bert-co-condensor", - "AmazonCounterfactualClassification (en)": 64.06, - "AmazonPolarityClassification": 66.88, - "AmazonReviewsClassification (en)": 34.85, - "Banking77Classification": 82.35, - "EmotionClassification": 41.91, - "ImdbClassification": 60.17, - "MTOPDomainClassification (en)": 91.34, - "MTOPIntentClassification (en)": 71.07, - "MassiveIntentClassification (en)": 70.4, - "MassiveScenarioClassification (en)": 73.73, - "ToxicConversationsClassification": 64.01, - "TweetSentimentExtractionClassification": 55.74 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "msmarco-bert-co-condensor", - "ArxivClusteringP2P": 36.94, - "ArxivClusteringS2S": 29.03, - "BiorxivClusteringP2P": 32.35, - "BiorxivClusteringS2S": 28.16, - "MedrxivClusteringP2P": 30.23, - "MedrxivClusteringS2S": 27.01, - "RedditClustering": 48.04, - "RedditClusteringP2P": 53.53, - "StackExchangeClustering": 59.54, - "StackExchangeClusteringP2P": 30.48, - "TwentyNewsgroupsClustering": 38.68 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "msmarco-bert-co-condensor", - "SprintDuplicateQuestions": 96.09, - "TwitterSemEval2015": 65.95, - "TwitterURLCorpus": 83.17 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "msmarco-bert-co-condensor", - "AskUbuntuDupQuestions": 58.99, - "MindSmallReranking": 27.13, - "SciDocsRR": 72.78, - "StackOverflowDupQuestions": 48.48 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "msmarco-bert-co-condensor", - "ArguAna": 45.15, - "CQADupstackRetrieval": 27.72, - "ClimateFEVER": 16.96, - "DBPedia": 27.86, - "FEVER": 45.68, - "FiQA2018": 15.62, - "HotpotQA": 35.61, - "MSMARCO": 29.57, - "NFCorpus": 22.29, - "NQ": 29.85, - "QuoraRetrieval": 86.51, - "SCIDOCS": 10.13, - "SciFact": 52.31, - "TRECCOVID": 40.54, - "Touche2020": 8.57 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "msmarco-bert-co-condensor", - "BIOSSES": 77.32, - "SICK-R": 72.0, - "STS12": 68.19, - "STS13": 80.4, - "STS14": 74.02, - "STS15": 82.57, - "STS16": 79.78, - "STS17 (en-en)": 85.94, - "STS22 (en)": 67.54, - "STSBenchmark": 76.97 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "msmarco-bert-co-condensor", - "SummEval": 29.5 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "msmarco-bert-co-condensor" - } - ] - } - }, - "paraphrase-multilingual-mpnet-base-v2": { - "BitextMining": { - "f1": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "BUCC (de-en)": 98.59, - "BUCC (fr-en)": 96.89, - "BUCC (ru-en)": 96.44, - "BUCC (zh-en)": 97.56, - "BornholmBitextMining (dan-Latn)": 18.18, - "Tatoeba (afr-eng)": 72.96, - "Tatoeba (amh-eng)": 53.49, - "Tatoeba (ang-eng)": 16.72, - "Tatoeba (ara-eng)": 90.19, - "Tatoeba (arq-eng)": 19.84, - "Tatoeba (arz-eng)": 55.69, - "Tatoeba (ast-eng)": 70.08, - "Tatoeba (awa-eng)": 42.83, - "Tatoeba (aze-eng)": 76.36, - "Tatoeba (bel-eng)": 79.94, - "Tatoeba (ben-eng)": 64.9, - "Tatoeba (ber-eng)": 4.88, - "Tatoeba (bos-eng)": 94.02, - "Tatoeba (bre-eng)": 6.42, - "Tatoeba (bul-eng)": 93.52, - "Tatoeba (cat-eng)": 96.05, - "Tatoeba (cbk-eng)": 58.68, - "Tatoeba (ceb-eng)": 7.39, - "Tatoeba (ces-eng)": 95.73, - "Tatoeba (cha-eng)": 12.59, - "Tatoeba (cmn-eng)": 95.83, - "Tatoeba (cor-eng)": 3.53, - "Tatoeba (csb-eng)": 23.73, - "Tatoeba (cym-eng)": 22.31, - "Tatoeba (dan-eng)": 96.17, - "Tatoeba (deu-eng)": 
97.73, - "Tatoeba (dsb-eng)": 36.85, - "Tatoeba (dtp-eng)": 5.03, - "Tatoeba (ell-eng)": 94.93, - "Tatoeba (epo-eng)": 55.12, - "Tatoeba (est-eng)": 98.4, - "Tatoeba (eus-eng)": 31.33, - "Tatoeba (fao-eng)": 38.24, - "Tatoeba (fin-eng)": 95.92, - "Tatoeba (fra-eng)": 93.12, - "Tatoeba (fry-eng)": 43.54, - "Tatoeba (gla-eng)": 4.72, - "Tatoeba (gle-eng)": 16.85, - "Tatoeba (glg-eng)": 95.32, - "Tatoeba (gsw-eng)": 25.12, - "Tatoeba (heb-eng)": 88.26, - "Tatoeba (hin-eng)": 97.75, - "Tatoeba (hrv-eng)": 97.0, - "Tatoeba (hsb-eng)": 44.32, - "Tatoeba (hun-eng)": 94.18, - "Tatoeba (hye-eng)": 94.38, - "Tatoeba (ido-eng)": 43.91, - "Tatoeba (ile-eng)": 60.36, - "Tatoeba (ina-eng)": 84.32, - "Tatoeba (ind-eng)": 93.5, - "Tatoeba (isl-eng)": 59.25, - "Tatoeba (ita-eng)": 93.76, - "Tatoeba (jav-eng)": 23.39, - "Tatoeba (jpn-eng)": 92.51, - "Tatoeba (kab-eng)": 1.41, - "Tatoeba (kat-eng)": 95.46, - "Tatoeba (kaz-eng)": 61.49, - "Tatoeba (khm-eng)": 58.8, - "Tatoeba (kor-eng)": 93.07, - "Tatoeba (kur-eng)": 61.44, - "Tatoeba (kzj-eng)": 5.88, - "Tatoeba (lat-eng)": 24.25, - "Tatoeba (lfn-eng)": 49.56, - "Tatoeba (lit-eng)": 95.37, - "Tatoeba (lvs-eng)": 97.53, - "Tatoeba (mal-eng)": 88.46, - "Tatoeba (mar-eng)": 93.83, - "Tatoeba (max-eng)": 48.77, - "Tatoeba (mhr-eng)": 7.57, - "Tatoeba (mkd-eng)": 93.02, - "Tatoeba (mon-eng)": 96.14, - "Tatoeba (nds-eng)": 38.88, - "Tatoeba (nld-eng)": 95.5, - "Tatoeba (nno-eng)": 81.41, - "Tatoeba (nob-eng)": 98.53, - "Tatoeba (nov-eng)": 50.23, - "Tatoeba (oci-eng)": 43.49, - "Tatoeba (orv-eng)": 23.77, - "Tatoeba (pam-eng)": 5.39, - "Tatoeba (pes-eng)": 93.47, - "Tatoeba (pms-eng)": 34.19, - "Tatoeba (pol-eng)": 96.95, - "Tatoeba (por-eng)": 93.02, - "Tatoeba (ron-eng)": 96.43, - "Tatoeba (rus-eng)": 92.92, - "Tatoeba (slk-eng)": 96.62, - "Tatoeba (slv-eng)": 97.08, - "Tatoeba (spa-eng)": 97.0, - "Tatoeba (sqi-eng)": 98.57, - "Tatoeba (srp-eng)": 94.12, - "Tatoeba (swe-eng)": 95.45, - "Tatoeba (swg-eng)": 22.8, - "Tatoeba (swh-eng)": 16.02, - "Tatoeba (tam-eng)": 73.6, - "Tatoeba (tat-eng)": 10.89, - "Tatoeba (tel-eng)": 79.73, - "Tatoeba (tgl-eng)": 17.67, - "Tatoeba (tha-eng)": 95.99, - "Tatoeba (tuk-eng)": 14.91, - "Tatoeba (tur-eng)": 96.17, - "Tatoeba (tzl-eng)": 34.21, - "Tatoeba (uig-eng)": 48.35, - "Tatoeba (ukr-eng)": 92.67, - "Tatoeba (urd-eng)": 95.12, - "Tatoeba (uzb-eng)": 23.19, - "Tatoeba (vie-eng)": 97.23, - "Tatoeba (war-eng)": 7.42, - "Tatoeba (wuu-eng)": 78.25, - "Tatoeba (xho-eng)": 6.53, - "Tatoeba (yid-eng)": 30.73, - "Tatoeba (yue-eng)": 77.58, - "Tatoeba (zsm-eng)": 95.8, - "Tatoeba (gsw-Latn_eng-Latn)": 25.12, - "Tatoeba (spa-Latn_eng-Latn)": 97.0, - "Tatoeba (lat-Latn_eng-Latn)": 24.25, - "Tatoeba (hun-Latn_eng-Latn)": 94.18, - "Tatoeba (eus-Latn_eng-Latn)": 31.33, - "Tatoeba (heb-Hebr_eng-Latn)": 88.26, - "Tatoeba (ang-Latn_eng-Latn)": 16.72, - "Tatoeba (swe-Latn_eng-Latn)": 95.45, - "Tatoeba (slk-Latn_eng-Latn)": 96.62, - "Tatoeba (ell-Grek_eng-Latn)": 94.93, - "Tatoeba (nld-Latn_eng-Latn)": 95.5, - "Tatoeba (cym-Latn_eng-Latn)": 22.31, - "Tatoeba (sqi-Latn_eng-Latn)": 98.57, - "Tatoeba (csb-Latn_eng-Latn)": 23.73, - "Tatoeba (ben-Beng_eng-Latn)": 64.9, - "Tatoeba (bre-Latn_eng-Latn)": 6.42, - "Tatoeba (mkd-Cyrl_eng-Latn)": 93.02, - "Tatoeba (cmn-Hans_eng-Latn)": 95.83, - "Tatoeba (deu-Latn_eng-Latn)": 97.73, - "Tatoeba (fao-Latn_eng-Latn)": 38.24, - "Tatoeba (afr-Latn_eng-Latn)": 72.96, - "Tatoeba (nno-Latn_eng-Latn)": 81.41, - "Tatoeba (jpn-Jpan_eng-Latn)": 92.51, - "Tatoeba (tzl-Latn_eng-Latn)": 34.21, - "Tatoeba 
(arz-Arab_eng-Latn)": 55.69, - "Tatoeba (ita-Latn_eng-Latn)": 93.76, - "Tatoeba (arq-Arab_eng-Latn)": 19.84, - "Tatoeba (uzb-Latn_eng-Latn)": 23.19, - "Tatoeba (rus-Cyrl_eng-Latn)": 92.92, - "Tatoeba (tat-Cyrl_eng-Latn)": 10.89, - "Tatoeba (fin-Latn_eng-Latn)": 95.92, - "Tatoeba (nob-Latn_eng-Latn)": 98.53, - "Tatoeba (tam-Taml_eng-Latn)": 73.6, - "Tatoeba (kur-Latn_eng-Latn)": 61.44, - "Tatoeba (wuu-Hans_eng-Latn)": 78.25, - "Tatoeba (cor-Latn_eng-Latn)": 3.53, - "Tatoeba (cha-Latn_eng-Latn)": 12.59, - "Tatoeba (hsb-Latn_eng-Latn)": 44.32, - "Tatoeba (max-Deva_eng-Latn)": 48.77, - "Tatoeba (kat-Geor_eng-Latn)": 95.46, - "Tatoeba (mal-Mlym_eng-Latn)": 88.46, - "Tatoeba (ina-Latn_eng-Latn)": 84.32, - "Tatoeba (cbk-Latn_eng-Latn)": 58.68, - "Tatoeba (yid-Hebr_eng-Latn)": 30.73, - "Tatoeba (swg-Latn_eng-Latn)": 22.8, - "Tatoeba (dtp-Latn_eng-Latn)": 5.03, - "Tatoeba (ber-Tfng_eng-Latn)": 4.88, - "Tatoeba (epo-Latn_eng-Latn)": 55.12, - "Tatoeba (mar-Deva_eng-Latn)": 93.83, - "Tatoeba (kaz-Cyrl_eng-Latn)": 61.49, - "Tatoeba (tgl-Latn_eng-Latn)": 17.67, - "Tatoeba (hrv-Latn_eng-Latn)": 97.0, - "Tatoeba (bel-Cyrl_eng-Latn)": 79.94, - "Tatoeba (pam-Latn_eng-Latn)": 5.39, - "Tatoeba (zsm-Latn_eng-Latn)": 95.8, - "Tatoeba (ces-Latn_eng-Latn)": 95.73, - "Tatoeba (gla-Latn_eng-Latn)": 4.72, - "Tatoeba (hin-Deva_eng-Latn)": 97.75, - "Tatoeba (slv-Latn_eng-Latn)": 97.08, - "Tatoeba (cat-Latn_eng-Latn)": 96.05, - "Tatoeba (war-Latn_eng-Latn)": 7.42, - "Tatoeba (hye-Armn_eng-Latn)": 94.38, - "Tatoeba (ind-Latn_eng-Latn)": 93.5, - "Tatoeba (kor-Hang_eng-Latn)": 93.07, - "Tatoeba (por-Latn_eng-Latn)": 93.02, - "Tatoeba (fry-Latn_eng-Latn)": 43.54, - "Tatoeba (dan-Latn_eng-Latn)": 96.17, - "Tatoeba (nov-Latn_eng-Latn)": 50.23, - "Tatoeba (vie-Latn_eng-Latn)": 97.23, - "Tatoeba (kzj-Latn_eng-Latn)": 5.88, - "Tatoeba (ido-Latn_eng-Latn)": 43.91, - "Tatoeba (tuk-Latn_eng-Latn)": 14.91, - "Tatoeba (glg-Latn_eng-Latn)": 95.32, - "Tatoeba (bos-Latn_eng-Latn)": 94.02, - "Tatoeba (gle-Latn_eng-Latn)": 16.85, - "Tatoeba (fra-Latn_eng-Latn)": 93.12, - "Tatoeba (lvs-Latn_eng-Latn)": 97.53, - "Tatoeba (mon-Cyrl_eng-Latn)": 96.14, - "Tatoeba (lit-Latn_eng-Latn)": 95.37, - "Tatoeba (ron-Latn_eng-Latn)": 96.43, - "Tatoeba (pms-Latn_eng-Latn)": 34.19, - "Tatoeba (lfn-Latn_eng-Latn)": 49.56, - "Tatoeba (isl-Latn_eng-Latn)": 59.25, - "Tatoeba (xho-Latn_eng-Latn)": 6.53, - "Tatoeba (orv-Cyrl_eng-Latn)": 23.77, - "Tatoeba (ukr-Cyrl_eng-Latn)": 92.67, - "Tatoeba (dsb-Latn_eng-Latn)": 36.85, - "Tatoeba (nds-Latn_eng-Latn)": 38.88, - "Tatoeba (amh-Ethi_eng-Latn)": 53.49, - "Tatoeba (yue-Hant_eng-Latn)": 77.58, - "Tatoeba (urd-Arab_eng-Latn)": 95.12, - "Tatoeba (tel-Telu_eng-Latn)": 79.73, - "Tatoeba (ile-Latn_eng-Latn)": 60.36, - "Tatoeba (jav-Latn_eng-Latn)": 23.39, - "Tatoeba (ast-Latn_eng-Latn)": 70.08, - "Tatoeba (tha-Thai_eng-Latn)": 95.99, - "Tatoeba (ara-Arab_eng-Latn)": 90.19, - "Tatoeba (pes-Arab_eng-Latn)": 93.47, - "Tatoeba (awa-Deva_eng-Latn)": 42.83, - "Tatoeba (tur-Latn_eng-Latn)": 96.17, - "Tatoeba (ceb-Latn_eng-Latn)": 7.39, - "Tatoeba (swh-Latn_eng-Latn)": 16.02, - "Tatoeba (srp-Cyrl_eng-Latn)": 94.12, - "Tatoeba (est-Latn_eng-Latn)": 98.4, - "Tatoeba (aze-Latn_eng-Latn)": 76.36, - "Tatoeba (bul-Cyrl_eng-Latn)": 93.52, - "Tatoeba (oci-Latn_eng-Latn)": 43.49, - "Tatoeba (pol-Latn_eng-Latn)": 96.95, - "Tatoeba (kab-Latn_eng-Latn)": 1.41, - "Tatoeba (khm-Khmr_eng-Latn)": 58.8, - "Tatoeba (uig-Arab_eng-Latn)": 48.35, - "Tatoeba (mhr-Cyrl_eng-Latn)": 7.57 - } - ] - }, - "Classification": { - "accuracy": [ - { - 
"Model": "paraphrase-multilingual-mpnet-base-v2", - "AllegroReviews": 33.86, - "AllegroReviews (pol-Latn)": 33.89, - "AmazonCounterfactualClassification (de)": 69.95, - "AmazonCounterfactualClassification (en)": 75.81, - "AmazonCounterfactualClassification (en-ext)": 76.25, - "AmazonCounterfactualClassification (ja)": 69.79, - "AmazonCounterfactualClassification (deu-Latn)": 69.96, - "AmazonCounterfactualClassification (jpn-Jpan)": 69.78, - "AmazonPolarityClassification": 76.41, - "AmazonReviewsClassification (de)": 39.52, - "AmazonReviewsClassification (en)": 38.52, - "AmazonReviewsClassification (es)": 39.99, - "AmazonReviewsClassification (fr)": 39.0, - "AmazonReviewsClassification (ja)": 36.64, - "AmazonReviewsClassification (zh)": 37.74, - "AmazonReviewsClassification (deu-Latn)": 39.53, - "AmazonReviewsClassification (spa-Latn)": 39.97, - "AmazonReviewsClassification (fra-Latn)": 38.98, - "AmazonReviewsClassification (jpn-Jpan)": 36.65, - "AmazonReviewsClassification (cmn-Hans)": 37.74, - "AngryTweetsClassification (dan-Latn)": 54.84, - "Banking77Classification": 81.1, - "CBD": 65.0, - "CBD (pol-Latn)": 64.97, - "DanishPoliticalCommentsClassification (dan-Latn)": 40.96, - "EmotionClassification": 45.85, - "GeoreviewClassification (rus-Cyrl)": 42.33, - "HeadlineClassification (rus-Cyrl)": 70.35, - "IFlyTek (cmn-Hans)": 43.98, - "ImdbClassification": 64.58, - "InappropriatenessClassification (rus-Cyrl)": 59.32, - "JDReview (cmn-Hans)": 70.34, - "KinopoiskClassification (rus-Cyrl)": 44.31, - "LccSentimentClassification (dan-Latn)": 58.4, - "MTOPDomainClassification (de)": 85.73, - "MTOPDomainClassification (en)": 89.24, - "MTOPDomainClassification (es)": 86.96, - "MTOPDomainClassification (fr)": 81.21, - "MTOPDomainClassification (hi)": 84.76, - "MTOPDomainClassification (th)": 82.51, - "MTOPDomainClassification (deu-Latn)": 85.73, - "MTOPDomainClassification (spa-Latn)": 86.98, - "MTOPDomainClassification (fra-Latn)": 81.21, - "MTOPDomainClassification (hin-Deva)": 84.76, - "MTOPDomainClassification (tha-Thai)": 82.51, - "MTOPIntentClassification (de)": 61.27, - "MTOPIntentClassification (en)": 68.69, - "MTOPIntentClassification (es)": 66.59, - "MTOPIntentClassification (fr)": 59.76, - "MTOPIntentClassification (hi)": 62.37, - "MTOPIntentClassification (th)": 64.8, - "MTOPIntentClassification (deu-Latn)": 61.26, - "MTOPIntentClassification (spa-Latn)": 66.6, - "MTOPIntentClassification (fra-Latn)": 59.75, - "MTOPIntentClassification (hin-Deva)": 62.38, - "MTOPIntentClassification (tha-Thai)": 64.77, - "MasakhaNEWSClassification (fra)": 78.1, - "MasakhaNEWSClassification (amh-Ethi)": 78.83, - "MasakhaNEWSClassification (eng)": 75.39, - "MasakhaNEWSClassification (fra-Latn)": 72.94, - "MasakhaNEWSClassification (hau-Latn)": 54.49, - "MasakhaNEWSClassification (ibo-Latn)": 46.79, - "MasakhaNEWSClassification (lin-Latn)": 69.77, - "MasakhaNEWSClassification (lug-Latn)": 43.05, - "MasakhaNEWSClassification (orm-Ethi)": 41.97, - "MasakhaNEWSClassification (pcm-Latn)": 90.2, - "MasakhaNEWSClassification (run-Latn)": 49.97, - "MasakhaNEWSClassification (sna-Latn)": 59.78, - "MasakhaNEWSClassification (som-Latn)": 47.65, - "MasakhaNEWSClassification (swa-Latn)": 60.42, - "MasakhaNEWSClassification (tir-Ethi)": 45.04, - "MasakhaNEWSClassification (xho-Latn)": 48.82, - "MasakhaNEWSClassification (yor-Latn)": 58.3, - "MassiveIntentClassification (pl)": 64.29, - "MassiveIntentClassification (fr)": 61.88, - "MassiveIntentClassification (mal-Mlym)": 54.34, - "MassiveIntentClassification (tel-Telu)": 
52.85, - "MassiveIntentClassification (jpn-Jpan)": 63.76, - "MassiveIntentClassification (nld-Latn)": 63.57, - "MassiveIntentClassification (jav-Latn)": 36.49, - "MassiveIntentClassification (heb-Hebr)": 58.25, - "MassiveIntentClassification (tam-Taml)": 50.18, - "MassiveIntentClassification (slv-Latn)": 63.5, - "MassiveIntentClassification (tha-Thai)": 61.12, - "MassiveIntentClassification (fra-Latn)": 64.8, - "MassiveIntentClassification (ind-Latn)": 65.43, - "MassiveIntentClassification (amh-Ethi)": 41.56, - "MassiveIntentClassification (en)": 69.32, - "MassiveIntentClassification (nob-Latn)": 62.62, - "MassiveIntentClassification (kan-Knda)": 50.62, - "MassiveIntentClassification (dan-Latn)": 62.8, - "MassiveIntentClassification (ell-Grek)": 62.63, - "MassiveIntentClassification (msa-Latn)": 60.72, - "MassiveIntentClassification (ita-Latn)": 64.69, - "MassiveIntentClassification (tur-Latn)": 64.58, - "MassiveIntentClassification (ben-Beng)": 48.79, - "MassiveIntentClassification (aze-Latn)": 56.98, - "MassiveIntentClassification (tgl-Latn)": 38.83, - "MassiveIntentClassification (mon-Cyrl)": 56.61, - "MassiveIntentClassification (urd-Arab)": 56.36, - "MassiveIntentClassification (vie-Latn)": 59.71, - "MassiveIntentClassification (cmo-Hans)": 65.32, - "MassiveIntentClassification (cym-Latn)": 27.89, - "MassiveIntentClassification (rus-Cyrl)": 63.23, - "MassiveIntentClassification (mya-Mymr)": 57.08, - "MassiveIntentClassification (hun-Latn)": 63.85, - "MassiveIntentClassification (hin-Deva)": 62.79, - "MassiveIntentClassification (hye-Armn)": 57.76, - "MassiveIntentClassification (kat-Geor)": 49.88, - "MassiveIntentClassification (fin-Latn)": 62.26, - "MassiveIntentClassification (ara-Arab)": 51.43, - "MassiveIntentClassification (por-Latn)": 64.88, - "MassiveIntentClassification (pol-Latn)": 64.32, - "MassiveIntentClassification (isl-Latn)": 37.09, - "MassiveIntentClassification (afr-Latn)": 52.35, - "MassiveIntentClassification (fas-Arab)": 65.33, - "MassiveIntentClassification (khm-Khmr)": 45.48, - "MassiveIntentClassification (kor-Kore)": 61.84, - "MassiveIntentClassification (spa-Latn)": 64.45, - "MassiveIntentClassification (cmo-Hant)": 62.33, - "MassiveIntentClassification (ron-Latn)": 62.83, - "MassiveIntentClassification (sqi-Latn)": 62.48, - "MassiveIntentClassification (swa-Latn)": 31.93, - "MassiveIntentClassification (swe-Latn)": 64.71, - "MassiveIntentClassification (deu-Latn)": 59.56, - "MassiveIntentClassification (lav-Latn)": 61.29, - "MassiveScenarioClassification (pl)": 68.98, - "MassiveScenarioClassification (fr)": 67.9, - "MassiveScenarioClassification (tam-Taml)": 55.97, - "MassiveScenarioClassification (heb-Hebr)": 65.16, - "MassiveScenarioClassification (ind-Latn)": 70.73, - "MassiveScenarioClassification (afr-Latn)": 59.68, - "MassiveScenarioClassification (fin-Latn)": 67.58, - "MassiveScenarioClassification (vie-Latn)": 65.7, - "MassiveScenarioClassification (mon-Cyrl)": 60.84, - "MassiveScenarioClassification (sqi-Latn)": 69.62, - "MassiveScenarioClassification (nob-Latn)": 70.23, - "MassiveScenarioClassification (por-Latn)": 70.08, - "MassiveScenarioClassification (aze-Latn)": 61.52, - "MassiveScenarioClassification (nld-Latn)": 70.37, - "MassiveScenarioClassification (spa-Latn)": 70.4, - "MassiveScenarioClassification (mal-Mlym)": 60.14, - "MassiveScenarioClassification (cmo-Hant)": 68.71, - "MassiveScenarioClassification (fra-Latn)": 70.71, - "MassiveScenarioClassification (ita-Latn)": 69.74, - "MassiveScenarioClassification (hun-Latn)": 70.31, - 
"MassiveScenarioClassification (urd-Arab)": 62.92, - "MassiveScenarioClassification (cym-Latn)": 35.27, - "MassiveScenarioClassification (khm-Khmr)": 53.13, - "MassiveScenarioClassification (swa-Latn)": 37.26, - "MassiveScenarioClassification (mya-Mymr)": 63.03, - "MassiveScenarioClassification (isl-Latn)": 44.16, - "MassiveScenarioClassification (tha-Thai)": 69.44, - "MassiveScenarioClassification (kat-Geor)": 57.3, - "MassiveScenarioClassification (pol-Latn)": 68.99, - "MassiveScenarioClassification (ell-Grek)": 68.81, - "MassiveScenarioClassification (cmo-Hans)": 71.25, - "MassiveScenarioClassification (tgl-Latn)": 43.98, - "MassiveScenarioClassification (lav-Latn)": 66.28, - "MassiveScenarioClassification (jpn-Jpan)": 69.68, - "MassiveScenarioClassification (deu-Latn)": 67.35, - "MassiveScenarioClassification (ara-Arab)": 57.79, - "MassiveScenarioClassification (en)": 75.35, - "MassiveScenarioClassification (msa-Latn)": 65.85, - "MassiveScenarioClassification (tel-Telu)": 58.79, - "MassiveScenarioClassification (ben-Beng)": 54.52, - "MassiveScenarioClassification (kan-Knda)": 56.08, - "MassiveScenarioClassification (tur-Latn)": 70.41, - "MassiveScenarioClassification (kor-Kore)": 68.51, - "MassiveScenarioClassification (hye-Armn)": 63.03, - "MassiveScenarioClassification (jav-Latn)": 44.22, - "MassiveScenarioClassification (rus-Cyrl)": 69.92, - "MassiveScenarioClassification (hin-Deva)": 67.94, - "MassiveScenarioClassification (amh-Ethi)": 48.96, - "MassiveScenarioClassification (dan-Latn)": 71.04, - "MassiveScenarioClassification (fas-Arab)": 69.88, - "MassiveScenarioClassification (slv-Latn)": 70.81, - "MassiveScenarioClassification (swe-Latn)": 71.6, - "MassiveScenarioClassification (ron-Latn)": 67.94, - "MultilingualSentiment (cmn-Hans)": 66.49, - "NoRecClassification (nob-Latn)": 50.32, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 41.57, - "OnlineShopping (cmn-Hans)": 87.75, - "PAC": 63.76, - "PAC (pol-Latn)": 63.76, - "PolEmo2.0-IN": 62.78, - "PolEmo2.0-IN (pol-Latn)": 62.74, - "PolEmo2.0-OUT": 19.98, - "PolEmo2.0-OUT (pol-Latn)": 19.92, - "RuReviewsClassification (rus-Cyrl)": 62.33, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.01, - "RuSciBenchOECDClassification (rus-Cyrl)": 44.14, - "TNews (cmn-Hans)": 43.73, - "ToxicConversationsClassification": 65.56, - "TweetSentimentExtractionClassification": 59.04, - "Waimai (cmn-Hans)": 83.97 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "8TagsClustering": 25.62, - "AlloProfClusteringP2P": 54.49, - "AlloProfClusteringS2S": 44.79, - "ArxivClusteringP2P": 37.78, - "ArxivClusteringS2S": 31.68, - "BiorxivClusteringP2P": 33.02, - "BiorxivClusteringS2S": 29.45, - "BlurbsClusteringP2P": 34.38, - "BlurbsClusteringS2S": 15.81, - "GeoreviewClusteringP2P (rus-Cyrl)": 56.18, - "HALClusteringS2S": 23.97, - "MLSUMClusteringP2P": 40.55, - "MLSUMClusteringP2P (rus-Cyrl)": 35.95, - "MLSUMClusteringS2S": 37.53, - "MLSUMClusteringS2S (rus-Cyrl)": 38.88, - "MasakhaNEWSClusteringP2P (fra)": 41.57, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 46.85, - "MasakhaNEWSClusteringP2P (eng)": 47.3, - "MasakhaNEWSClusteringP2P (fra-Latn)": 53.3, - "MasakhaNEWSClusteringP2P (hau-Latn)": 27.61, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 41.32, - "MasakhaNEWSClusteringP2P (lin-Latn)": 58.37, - "MasakhaNEWSClusteringP2P (lug-Latn)": 47.56, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.53, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 66.55, - "MasakhaNEWSClusteringP2P (run-Latn)": 
51.97, - "MasakhaNEWSClusteringP2P (sna-Latn)": 45.55, - "MasakhaNEWSClusteringP2P (som-Latn)": 33.98, - "MasakhaNEWSClusteringP2P (swa-Latn)": 25.03, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 48.33, - "MasakhaNEWSClusteringP2P (xho-Latn)": 29.47, - "MasakhaNEWSClusteringP2P (yor-Latn)": 28.25, - "MasakhaNEWSClusteringS2S (fra)": 30.88, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 51.54, - "MasakhaNEWSClusteringS2S (eng)": 43.28, - "MasakhaNEWSClusteringS2S (fra-Latn)": 37.92, - "MasakhaNEWSClusteringS2S (hau-Latn)": 17.97, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 34.56, - "MasakhaNEWSClusteringS2S (lin-Latn)": 57.43, - "MasakhaNEWSClusteringS2S (lug-Latn)": 45.22, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 21.9, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 62.1, - "MasakhaNEWSClusteringS2S (run-Latn)": 46.81, - "MasakhaNEWSClusteringS2S (sna-Latn)": 43.15, - "MasakhaNEWSClusteringS2S (som-Latn)": 29.44, - "MasakhaNEWSClusteringS2S (swa-Latn)": 10.31, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.95, - "MasakhaNEWSClusteringS2S (xho-Latn)": 21.26, - "MasakhaNEWSClusteringS2S (yor-Latn)": 28.88, - "MedrxivClusteringP2P": 31.93, - "MedrxivClusteringS2S": 31.53, - "RedditClustering": 45.65, - "RedditClusteringP2P": 52.05, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.47, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 42.9, - "StackExchangeClustering": 52.99, - "StackExchangeClusteringP2P": 33.06, - "TenKGnadClusteringP2P": 35.96, - "TenKGnadClusteringS2S": 22.0, - "TwentyNewsgroupsClustering": 44.36 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "CDSC-E": 75.76, - "CDSC-E (pol-Latn)": 75.77, - "OpusparcusPC (fr)": 93.45, - "OpusparcusPC (deu-Latn)": 97.34, - "OpusparcusPC (en)": 98.59, - "OpusparcusPC (fin-Latn)": 95.33, - "OpusparcusPC (fra-Latn)": 93.45, - "OpusparcusPC (rus-Cyrl)": 90.47, - "OpusparcusPC (swe-Latn)": 95.16, - "PPC": 93.67, - "PSC": 98.26, - "PSC (pol-Latn)": 98.26, - "PawsXPairClassification (fr)": 58.14, - "PawsXPairClassification (deu-Latn)": 55.69, - "PawsXPairClassification (en)": 60.12, - "PawsXPairClassification (spa-Latn)": 56.94, - "PawsXPairClassification (fra-Latn)": 58.14, - "PawsXPairClassification (jpn-Hira)": 49.37, - "PawsXPairClassification (kor-Hang)": 50.66, - "PawsXPairClassification (cmn-Hans)": 55.47, - "SICK-E-PL": 77.22, - "SICK-E-PL (pol-Latn)": 77.22, - "SprintDuplicateQuestions": 90.55, - "TERRa (rus-Cyrl)": 64.57, - "TwitterSemEval2015": 66.75, - "TwitterURLCorpus": 85.14 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "AlloprofReranking": 54.34, - "AlloprofReranking (fra-Latn)": 67.2, - "AskUbuntuDupQuestions": 60.16, - "MMarcoReranking (cmn-Hans)": 14.57, - "MindSmallReranking": 30.15, - "RuBQReranking (rus-Cyrl)": 58.77, - "SciDocsRR": 78.09, - "StackOverflowDupQuestions": 46.78, - "SyntecReranking": 83.23, - "SyntecReranking (fra-Latn)": 80.97, - "T2Reranking (cmn-Hans)": 64.49 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "AILACasedocs": 17.45, - "AILAStatutes": 22.24, - "ARCChallenge": 7.19, - "AlloprofRetrieval": 30.8, - "AlloprofRetrieval (fra-Latn)": 30.8, - "AlphaNLI": 21.87, - "ArguAna": 48.91, - "ArguAna-PL": 42.62, - "ArguAna-PL (pol-Latn)": 42.61, - "BSARDRetrieval": 0.0, - "BSARDRetrieval (fra-Latn)": 13.19, - "CQADupstackRetrieval": 31.32, - "ClimateFEVER": 15.27, - "CmedqaRetrieval (cmn-Hans)": 10.15, - "CovidRetrieval (cmn-Hans)": 28.85, - "DBPedia": 26.22, - "DBPedia-PL": 
20.18, - "DuRetrieval (cmn-Hans)": 33.41, - "EcomRetrieval (cmn-Hans)": 9.69, - "FEVER": 56.76, - "FiQA-PL": 14.68, - "FiQA-PL (pol-Latn)": 14.71, - "FiQA2018": 22.96, - "GerDaLIRSmall (deu-Latn)": 3.0, - "HellaSwag": 17.53, - "HotpotQA": 37.03, - "HotpotQA-PL": 29.36, - "LEMBNarrativeQARetrieval": 16.02, - "LEMBNeedleRetrieval": 14.0, - "LEMBPasskeyRetrieval": 7.75, - "LEMBQMSumRetrieval": 12.23, - "LEMBSummScreenFDRetrieval": 41.15, - "LEMBWikimQARetrieval": 38.86, - "LeCaRDv2 (zho-Hans)": 33.91, - "LegalBenchConsumerContractsQA": 52.37, - "LegalBenchCorporateLobbying": 87.62, - "LegalQuAD (deu-Latn)": 17.8, - "LegalSummarization": 56.8, - "MMarcoRetrieval (cmn-Hans)": 44.62, - "MSMARCO": 26.6, - "MSMARCO-PL": 12.45, - "MedicalRetrieval (cmn-Hans)": 14.1, - "MintakaRetrieval (fr)": 24.45, - "MintakaRetrieval (ara-Arab)": 14.55, - "MintakaRetrieval (deu-Latn)": 25.43, - "MintakaRetrieval (spa-Latn)": 24.94, - "MintakaRetrieval (fra-Latn)": 24.45, - "MintakaRetrieval (hin-Deva)": 18.67, - "MintakaRetrieval (ita-Latn)": 25.62, - "MintakaRetrieval (jpn-Hira)": 15.46, - "MintakaRetrieval (por-Latn)": 26.15, - "NFCorpus": 25.49, - "NFCorpus-PL": 18.53, - "NFCorpus-PL (pol-Latn)": 18.54, - "NQ": 33.6, - "NQ-PL": 15.64, - "PIQA": 18.65, - "Quail": 2.98, - "Quora-PL": 79.18, - "QuoraRetrieval": 86.4, - "RARbCode": 11.02, - "RARbMath": 30.93, - "RiaNewsRetrieval (rus-Cyrl)": 51.75, - "RuBQRetrieval (rus-Cyrl)": 37.04, - "SCIDOCS": 13.97, - "SCIDOCS-PL": 11.18, - "SCIDOCS-PL (pol-Latn)": 11.17, - "SIQA": 1.21, - "SciFact": 50.3, - "SciFact-PL": 41.53, - "SciFact-PL (pol-Latn)": 41.55, - "SpartQA": 5.69, - "SyntecRetrieval": 76.0, - "SyntecRetrieval (fra-Latn)": 76.0, - "T2Retrieval (cmn-Hans)": 28.35, - "TRECCOVID": 37.87, - "TRECCOVID-PL": 35.38, - "TRECCOVID-PL (pol-Latn)": 35.43, - "TempReasonL1": 1.94, - "TempReasonL2Fact": 5.34, - "TempReasonL2Pure": 0.33, - "TempReasonL3Fact": 6.79, - "TempReasonL3Pure": 3.19, - "Touche2020": 17.4, - "VideoRetrieval (cmn-Hans)": 14.18, - "WinoGrande": 49.01, - "XPQARetrieval (fr)": 46.22, - "XPQARetrieval (ara-Arab_ara-Arab)": 24.86, - "XPQARetrieval (eng-Latn_ara-Arab)": 19.6, - "XPQARetrieval (ara-Arab_eng-Latn)": 28.21, - "XPQARetrieval (deu-Latn_deu-Latn)": 48.81, - "XPQARetrieval (eng-Latn_deu-Latn)": 31.93, - "XPQARetrieval (deu-Latn_eng-Latn)": 53.26, - "XPQARetrieval (spa-Latn_spa-Latn)": 41.08, - "XPQARetrieval (eng-Latn_spa-Latn)": 30.05, - "XPQARetrieval (spa-Latn_eng-Latn)": 43.4, - "XPQARetrieval (fra-Latn_fra-Latn)": 46.22, - "XPQARetrieval (eng-Latn_fra-Latn)": 29.55, - "XPQARetrieval (fra-Latn_eng-Latn)": 47.3, - "XPQARetrieval (hin-Deva_hin-Deva)": 50.74, - "XPQARetrieval (eng-Latn_hin-Deva)": 24.97, - "XPQARetrieval (hin-Deva_eng-Latn)": 49.24, - "XPQARetrieval (ita-Latn_ita-Latn)": 52.87, - "XPQARetrieval (eng-Latn_ita-Latn)": 33.44, - "XPQARetrieval (ita-Latn_eng-Latn)": 51.49, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 53.17, - "XPQARetrieval (eng-Latn_jpn-Hira)": 26.66, - "XPQARetrieval (jpn-Hira_eng-Latn)": 49.86, - "XPQARetrieval (kor-Hang_kor-Hang)": 24.9, - "XPQARetrieval (eng-Latn_kor-Hang)": 24.5, - "XPQARetrieval (kor-Hang_eng-Latn)": 24.57, - "XPQARetrieval (pol-Latn_pol-Latn)": 29.36, - "XPQARetrieval (eng-Latn_pol-Latn)": 20.48, - "XPQARetrieval (pol-Latn_eng-Latn)": 29.31, - "XPQARetrieval (por-Latn_por-Latn)": 34.26, - "XPQARetrieval (eng-Latn_por-Latn)": 21.72, - "XPQARetrieval (por-Latn_eng-Latn)": 37.62, - "XPQARetrieval (tam-Taml_tam-Taml)": 19.8, - "XPQARetrieval (eng-Latn_tam-Taml)": 13.93, - "XPQARetrieval 
(tam-Taml_eng-Latn)": 18.26, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 42.54, - "XPQARetrieval (eng-Latn_cmn-Hans)": 20.91, - "XPQARetrieval (cmn-Hans_eng-Latn)": 42.81 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "AFQMC (cmn-Hans)": 15.69, - "ATEC (cmn-Hans)": 20.27, - "BIOSSES": 76.27, - "BQ (cmn-Hans)": 36.33, - "CDSC-R": 88.8, - "CDSC-R (pol-Latn)": 88.8, - "LCQMC (cmn-Hans)": 63.3, - "PAWSX (cmn-Hans)": 12.16, - "RUParaPhraserSTS (rus-Cyrl)": 65.74, - "RuSTSBenchmarkSTS (rus-Cyrl)": 82.46, - "SICK-R": 79.62, - "SICK-R-PL": 73.13, - "SICK-R-PL (pol-Latn)": 73.13, - "SICKFr": 75.56, - "SICKFr (fra-Latn)": 75.56, - "STS12": 77.9, - "STS13": 85.11, - "STS14": 80.81, - "STS15": 87.48, - "STS16": 83.2, - "STS17 (ar-ar)": 79.1, - "STS17 (en-ar)": 80.85, - "STS17 (en-de)": 83.28, - "STS17 (en-en)": 86.99, - "STS17 (en-tr)": 74.9, - "STS17 (es-en)": 86.11, - "STS17 (es-es)": 85.14, - "STS17 (fr-en)": 81.17, - "STS17 (it-en)": 84.24, - "STS17 (ko-ko)": 83.41, - "STS17 (nl-en)": 82.51, - "STS17 (eng-Latn_deu-Latn)": 83.28, - "STS17 (eng-Latn_tur-Latn)": 74.9, - "STS17 (eng-Latn_ara-Arab)": 80.85, - "STS17 (ara-Arab)": 79.1, - "STS17 (nld-Latn_eng-Latn)": 82.51, - "STS17 (fra-Latn_eng-Latn)": 81.17, - "STS17 (ita-Latn_eng-Latn)": 84.24, - "STS17 (spa-Latn_eng-Latn)": 86.11, - "STS17 (spa-Latn)": 85.14, - "STS17 (kor-Hang)": 83.41, - "STS22 (pl)": 33.64, - "STS22 (fr)": 74.3, - "STS22 (spa-Latn)": 59.91, - "STS22 (en)": 63.52, - "STS22 (spa-Latn_ita-Latn)": 53.7, - "STS22 (pol-Latn)": 33.65, - "STS22 (ara-Arab)": 52.19, - "STS22 (deu-Latn)": 46.7, - "STS22 (fra-Latn)": 74.3, - "STS22 (deu-Latn_pol-Latn)": 40.53, - "STS22 (tur-Latn)": 56.3, - "STS22 (cmn-Hans_eng-Latn)": 67.96, - "STS22 (pol-Latn_eng-Latn)": 73.07, - "STS22 (rus-Cyrl)": 58.74, - "STS22 (cmn-Hans)": 61.75, - "STS22 (spa-Latn_eng-Latn)": 70.26, - "STS22 (fra-Latn_pol-Latn)": 84.52, - "STS22 (deu-Latn_eng-Latn)": 50.81, - "STS22 (deu-Latn_fra-Latn)": 62.34, - "STS22 (ita-Latn)": 60.65, - "STSB (cmn-Hans)": 80.84, - "STSBenchmark": 86.82, - "STSBenchmarkMultilingualSTS (fr)": 84.69, - "STSBenchmarkMultilingualSTS (nld-Latn)": 83.36, - "STSBenchmarkMultilingualSTS (deu-Latn)": 83.56, - "STSBenchmarkMultilingualSTS (fra-Latn)": 84.69, - "STSBenchmarkMultilingualSTS (spa-Latn)": 84.61, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.98, - "STSBenchmarkMultilingualSTS (en)": 86.82, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.45, - "STSBenchmarkMultilingualSTS (por-Latn)": 84.0, - "STSBenchmarkMultilingualSTS (ita-Latn)": 84.09, - "STSBenchmarkMultilingualSTS (pol-Latn)": 81.46 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2", - "SummEval": 31.57, - "SummEvalFr": 29.47, - "SummEvalFr (fra-Latn)": 29.47 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "paraphrase-multilingual-mpnet-base-v2" - } - ] - } - }, - "DanskBERT": { - "BitextMining": { - "f1": [ - { - "Model": "DanskBERT", - "BornholmBitextMining": 6.34 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "DanskBERT", - "AngryTweetsClassification": 54.28, - "DKHateClassification": 59.3, - "DanishPoliticalCommentsClassification": 39.81, - "LccSentimentClassification": 58.0, - "MassiveIntentClassification (da)": 54.68, - "MassiveIntentClassification (nb)": 45.38, - "MassiveIntentClassification (sv)": 40.82, - "MassiveScenarioClassification (da)": 59.56, - "MassiveScenarioClassification (nb)": 47.55, - "MassiveScenarioClassification (sv)": 40.14, - 
"NoRecClassification": 46.06, - "NordicLangClassification": 74.25, - "NorwegianParliament": 56.79, - "ScalaDaClassification": 66.59, - "ScalaNbClassification": 59.99 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "DanskBERT" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "DanskBERT" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "DanskBERT" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "DanskBERT" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "DanskBERT" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "DanskBERT" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "DanskBERT" - } - ] - } - }, - "dfm-encoder-large-v1": { - "BitextMining": { - "f1": [ - { - "Model": "dfm-encoder-large-v1", - "BornholmBitextMining": 11.65 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "dfm-encoder-large-v1", - "AngryTweetsClassification": 53.8, - "DKHateClassification": 60.09, - "DanishPoliticalCommentsClassification": 36.6, - "LccSentimentClassification": 57.33, - "MassiveIntentClassification (da)": 60.55, - "MassiveIntentClassification (nb)": 52.49, - "MassiveIntentClassification (sv)": 49.74, - "MassiveScenarioClassification (da)": 64.16, - "MassiveScenarioClassification (nb)": 54.59, - "MassiveScenarioClassification (sv)": 50.1, - "NoRecClassification": 48.3, - "NordicLangClassification": 77.68, - "NorwegianParliament": 58.78, - "ScalaDaClassification": 63.08, - "ScalaNbClassification": 58.95 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "dfm-encoder-large-v1" - } - ] - } - }, - "instructor-xl": { - "BitextMining": { - "f1": [ - { - "Model": "instructor-xl" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "instructor-xl" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "instructor-xl" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "instructor-xl" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "instructor-xl" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "instructor-xl", - "BrightRetrieval (aops)": 8.26, - "BrightRetrieval (robotics)": 17.39, - "BrightRetrieval (economics)": 22.81, - "BrightRetrieval (stackoverflow)": 19.06, - "BrightRetrieval (leetcode)": 27.5, - "BrightRetrieval (theoremqa_questions)": 14.59, - "BrightRetrieval (psychology)": 27.43, - "BrightRetrieval (biology)": 21.91, - "BrightRetrieval (theoremqa_theorems)": 6.5, - "BrightRetrieval (earth_science)": 34.35, - "BrightRetrieval (sustainable_living)": 18.82, - "BrightRetrieval (pony)": 5.02 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "instructor-xl" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "instructor-xl" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "instructor-xl", - "Core17InstructionRetrieval": 0.69, - "News21InstructionRetrieval": -0.9, - "Robust04InstructionRetrieval": -8.08 - } - ] - } - }, - "GritLM-7B": { - "BitextMining": { - "f1": [ - { - "Model": "GritLM-7B", - 
"BornholmBitextMining (dan-Latn)": 45.13, - "Tatoeba (csb-Latn_eng-Latn)": 50.13, - "Tatoeba (ceb-Latn_eng-Latn)": 33.5, - "Tatoeba (cmn-Hans_eng-Latn)": 94.08, - "Tatoeba (uzb-Latn_eng-Latn)": 41.69, - "Tatoeba (kur-Latn_eng-Latn)": 27.94, - "Tatoeba (ita-Latn_eng-Latn)": 91.2, - "Tatoeba (lvs-Latn_eng-Latn)": 53.54, - "Tatoeba (yid-Hebr_eng-Latn)": 17.13, - "Tatoeba (gle-Latn_eng-Latn)": 48.14, - "Tatoeba (ast-Latn_eng-Latn)": 79.11, - "Tatoeba (ang-Latn_eng-Latn)": 76.84, - "Tatoeba (jav-Latn_eng-Latn)": 26.6, - "Tatoeba (ina-Latn_eng-Latn)": 91.24, - "Tatoeba (nob-Latn_eng-Latn)": 93.53, - "Tatoeba (swe-Latn_eng-Latn)": 90.43, - "Tatoeba (lfn-Latn_eng-Latn)": 62.23, - "Tatoeba (fin-Latn_eng-Latn)": 85.76, - "Tatoeba (fry-Latn_eng-Latn)": 61.16, - "Tatoeba (gsw-Latn_eng-Latn)": 53.28, - "Tatoeba (rus-Cyrl_eng-Latn)": 91.82, - "Tatoeba (tat-Cyrl_eng-Latn)": 24.46, - "Tatoeba (mal-Mlym_eng-Latn)": 33.79, - "Tatoeba (hrv-Latn_eng-Latn)": 91.04, - "Tatoeba (ind-Latn_eng-Latn)": 90.05, - "Tatoeba (tam-Taml_eng-Latn)": 46.27, - "Tatoeba (kaz-Cyrl_eng-Latn)": 36.27, - "Tatoeba (uig-Arab_eng-Latn)": 22.6, - "Tatoeba (slv-Latn_eng-Latn)": 82.71, - "Tatoeba (pms-Latn_eng-Latn)": 50.41, - "Tatoeba (lit-Latn_eng-Latn)": 56.36, - "Tatoeba (cha-Latn_eng-Latn)": 34.69, - "Tatoeba (est-Latn_eng-Latn)": 46.73, - "Tatoeba (mhr-Cyrl_eng-Latn)": 10.8, - "Tatoeba (dan-Latn_eng-Latn)": 92.01, - "Tatoeba (pol-Latn_eng-Latn)": 95.6, - "Tatoeba (nov-Latn_eng-Latn)": 64.85, - "Tatoeba (swh-Latn_eng-Latn)": 46.09, - "Tatoeba (tha-Thai_eng-Latn)": 81.25, - "Tatoeba (arz-Arab_eng-Latn)": 52.97, - "Tatoeba (epo-Latn_eng-Latn)": 76.87, - "Tatoeba (deu-Latn_eng-Latn)": 98.02, - "Tatoeba (hye-Armn_eng-Latn)": 35.94, - "Tatoeba (afr-Latn_eng-Latn)": 79.17, - "Tatoeba (gla-Latn_eng-Latn)": 40.8, - "Tatoeba (isl-Latn_eng-Latn)": 74.94, - "Tatoeba (awa-Deva_eng-Latn)": 44.31, - "Tatoeba (ido-Latn_eng-Latn)": 65.69, - "Tatoeba (kor-Hang_eng-Latn)": 87.43, - "Tatoeba (amh-Ethi_eng-Latn)": 6.18, - "Tatoeba (eus-Latn_eng-Latn)": 31.88, - "Tatoeba (mkd-Cyrl_eng-Latn)": 73.82, - "Tatoeba (tur-Latn_eng-Latn)": 86.62, - "Tatoeba (pes-Arab_eng-Latn)": 78.98, - "Tatoeba (heb-Hebr_eng-Latn)": 61.75, - "Tatoeba (aze-Latn_eng-Latn)": 64.11, - "Tatoeba (hun-Latn_eng-Latn)": 88.54, - "Tatoeba (bul-Cyrl_eng-Latn)": 90.37, - "Tatoeba (kab-Latn_eng-Latn)": 2.9, - "Tatoeba (cat-Latn_eng-Latn)": 90.66, - "Tatoeba (dsb-Latn_eng-Latn)": 51.72, - "Tatoeba (kat-Geor_eng-Latn)": 38.42, - "Tatoeba (urd-Arab_eng-Latn)": 68.02, - "Tatoeba (wuu-Hans_eng-Latn)": 80.28, - "Tatoeba (oci-Latn_eng-Latn)": 58.12, - "Tatoeba (arq-Arab_eng-Latn)": 30.52, - "Tatoeba (ron-Latn_eng-Latn)": 90.29, - "Tatoeba (bos-Latn_eng-Latn)": 87.33, - "Tatoeba (nds-Latn_eng-Latn)": 64.54, - "Tatoeba (tgl-Latn_eng-Latn)": 83.24, - "Tatoeba (glg-Latn_eng-Latn)": 86.69, - "Tatoeba (ben-Beng_eng-Latn)": 61.32, - "Tatoeba (khm-Khmr_eng-Latn)": 16.4, - "Tatoeba (ukr-Cyrl_eng-Latn)": 90.19, - "Tatoeba (max-Deva_eng-Latn)": 51.87, - "Tatoeba (lat-Latn_eng-Latn)": 80.43, - "Tatoeba (xho-Latn_eng-Latn)": 28.43, - "Tatoeba (spa-Latn_eng-Latn)": 96.75, - "Tatoeba (tzl-Latn_eng-Latn)": 42.85, - "Tatoeba (ara-Arab_eng-Latn)": 76.77, - "Tatoeba (vie-Latn_eng-Latn)": 91.32, - "Tatoeba (ces-Latn_eng-Latn)": 92.02, - "Tatoeba (jpn-Jpan_eng-Latn)": 91.9, - "Tatoeba (bel-Cyrl_eng-Latn)": 76.21, - "Tatoeba (mon-Cyrl_eng-Latn)": 27.38, - "Tatoeba (nld-Latn_eng-Latn)": 94.96, - "Tatoeba (war-Latn_eng-Latn)": 27.75, - "Tatoeba (bre-Latn_eng-Latn)": 12.59, - "Tatoeba (por-Latn_eng-Latn)": 93.41, - 
"Tatoeba (ile-Latn_eng-Latn)": 76.72, - "Tatoeba (mar-Deva_eng-Latn)": 51.54, - "Tatoeba (fao-Latn_eng-Latn)": 62.03, - "Tatoeba (slk-Latn_eng-Latn)": 84.96, - "Tatoeba (tel-Telu_eng-Latn)": 24.26, - "Tatoeba (cym-Latn_eng-Latn)": 50.03, - "Tatoeba (srp-Cyrl_eng-Latn)": 88.45, - "Tatoeba (swg-Latn_eng-Latn)": 52.09, - "Tatoeba (hin-Deva_eng-Latn)": 84.19, - "Tatoeba (yue-Hant_eng-Latn)": 79.5, - "Tatoeba (fra-Latn_eng-Latn)": 92.47, - "Tatoeba (cor-Latn_eng-Latn)": 6.97, - "Tatoeba (hsb-Latn_eng-Latn)": 64.48, - "Tatoeba (zsm-Latn_eng-Latn)": 90.06, - "Tatoeba (ber-Tfng_eng-Latn)": 6.2, - "Tatoeba (pam-Latn_eng-Latn)": 12.11, - "Tatoeba (kzj-Latn_eng-Latn)": 9.61, - "Tatoeba (dtp-Latn_eng-Latn)": 8.37, - "Tatoeba (nno-Latn_eng-Latn)": 80.89, - "Tatoeba (ell-Grek_eng-Latn)": 80.13, - "Tatoeba (orv-Cyrl_eng-Latn)": 45.88, - "Tatoeba (sqi-Latn_eng-Latn)": 54.37, - "Tatoeba (tuk-Latn_eng-Latn)": 30.47, - "Tatoeba (cbk-Latn_eng-Latn)": 67.64 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "GritLM-7B", - "AllegroReviews (pol-Latn)": 37.32, - "AmazonCounterfactualClassification (en-ext)": 70.34, - "AmazonCounterfactualClassification (en)": 71.1, - "AmazonCounterfactualClassification (deu-Latn)": 67.63, - "AmazonCounterfactualClassification (jpn-Jpan)": 73.3, - "AmazonPolarityClassification": 86.69, - "AmazonReviewsClassification (en)": 45.51, - "AmazonReviewsClassification (deu-Latn)": 43.77, - "AmazonReviewsClassification (spa-Latn)": 43.0, - "AmazonReviewsClassification (fra-Latn)": 44.15, - "AmazonReviewsClassification (jpn-Jpan)": 41.49, - "AmazonReviewsClassification (cmn-Hans)": 35.34, - "AngryTweetsClassification (dan-Latn)": 54.68, - "Banking77Classification": 79.36, - "CBD (pol-Latn)": 70.98, - "DanishPoliticalCommentsClassification (dan-Latn)": 37.69, - "EmotionClassification": 48.79, - "GeoreviewClassification (rus-Cyrl)": 45.72, - "HeadlineClassification (rus-Cyrl)": 78.05, - "IFlyTek (cmn-Hans)": 48.49, - "ImdbClassification": 82.25, - "InappropriatenessClassification (rus-Cyrl)": 60.11, - "JDReview (cmn-Hans)": 84.02, - "KinopoiskClassification (rus-Cyrl)": 56.14, - "LccSentimentClassification (dan-Latn)": 57.2, - "MTOPDomainClassification (en)": 92.67, - "MTOPDomainClassification (deu-Latn)": 88.32, - "MTOPDomainClassification (spa-Latn)": 88.45, - "MTOPDomainClassification (fra-Latn)": 88.44, - "MTOPDomainClassification (hin-Deva)": 86.89, - "MTOPDomainClassification (tha-Thai)": 82.97, - "MTOPIntentClassification (en)": 69.77, - "MTOPIntentClassification (deu-Latn)": 69.53, - "MTOPIntentClassification (spa-Latn)": 67.49, - "MTOPIntentClassification (fra-Latn)": 65.93, - "MTOPIntentClassification (hin-Deva)": 59.47, - "MTOPIntentClassification (tha-Thai)": 65.14, - "MasakhaNEWSClassification (amh-Ethi)": 53.06, - "MasakhaNEWSClassification (eng)": 77.57, - "MasakhaNEWSClassification (fra-Latn)": 77.39, - "MasakhaNEWSClassification (hau-Latn)": 74.66, - "MasakhaNEWSClassification (ibo-Latn)": 68.64, - "MasakhaNEWSClassification (lin-Latn)": 74.23, - "MasakhaNEWSClassification (lug-Latn)": 72.33, - "MasakhaNEWSClassification (orm-Ethi)": 77.6, - "MasakhaNEWSClassification (pcm-Latn)": 91.28, - "MasakhaNEWSClassification (run-Latn)": 76.3, - "MasakhaNEWSClassification (sna-Latn)": 85.99, - "MasakhaNEWSClassification (som-Latn)": 63.71, - "MasakhaNEWSClassification (swa-Latn)": 73.4, - "MasakhaNEWSClassification (tir-Ethi)": 34.41, - "MasakhaNEWSClassification (xho-Latn)": 83.27, - "MasakhaNEWSClassification (yor-Latn)": 80.92, - "MassiveIntentClassification 
(mya-Mymr)": 36.92, - "MassiveIntentClassification (en)": 71.52, - "MassiveIntentClassification (slv-Latn)": 63.08, - "MassiveIntentClassification (sqi-Latn)": 50.98, - "MassiveIntentClassification (kor-Kore)": 65.71, - "MassiveIntentClassification (aze-Latn)": 56.24, - "MassiveIntentClassification (isl-Latn)": 51.96, - "MassiveIntentClassification (hin-Deva)": 61.18, - "MassiveIntentClassification (dan-Latn)": 65.39, - "MassiveIntentClassification (vie-Latn)": 62.05, - "MassiveIntentClassification (heb-Hebr)": 57.71, - "MassiveIntentClassification (tur-Latn)": 65.26, - "MassiveIntentClassification (cmo-Hans)": 67.43, - "MassiveIntentClassification (khm-Khmr)": 38.86, - "MassiveIntentClassification (deu-Latn)": 67.75, - "MassiveIntentClassification (fas-Arab)": 65.98, - "MassiveIntentClassification (jav-Latn)": 50.25, - "MassiveIntentClassification (nld-Latn)": 66.82, - "MassiveIntentClassification (jpn-Jpan)": 68.56, - "MassiveIntentClassification (ita-Latn)": 68.04, - "MassiveIntentClassification (cym-Latn)": 48.59, - "MassiveIntentClassification (pol-Latn)": 67.97, - "MassiveIntentClassification (fin-Latn)": 60.55, - "MassiveIntentClassification (tha-Thai)": 58.99, - "MassiveIntentClassification (lav-Latn)": 51.12, - "MassiveIntentClassification (mal-Mlym)": 43.57, - "MassiveIntentClassification (hun-Latn)": 63.48, - "MassiveIntentClassification (ind-Latn)": 65.58, - "MassiveIntentClassification (por-Latn)": 67.76, - "MassiveIntentClassification (tel-Telu)": 44.73, - "MassiveIntentClassification (amh-Ethi)": 34.73, - "MassiveIntentClassification (kan-Knda)": 44.51, - "MassiveIntentClassification (spa-Latn)": 66.45, - "MassiveIntentClassification (urd-Arab)": 54.11, - "MassiveIntentClassification (kat-Geor)": 42.01, - "MassiveIntentClassification (tam-Taml)": 43.48, - "MassiveIntentClassification (afr-Latn)": 59.48, - "MassiveIntentClassification (rus-Cyrl)": 69.41, - "MassiveIntentClassification (tgl-Latn)": 61.83, - "MassiveIntentClassification (ell-Grek)": 60.45, - "MassiveIntentClassification (hye-Armn)": 43.12, - "MassiveIntentClassification (ara-Arab)": 54.46, - "MassiveIntentClassification (fra-Latn)": 67.69, - "MassiveIntentClassification (mon-Cyrl)": 40.84, - "MassiveIntentClassification (msa-Latn)": 62.61, - "MassiveIntentClassification (nob-Latn)": 63.58, - "MassiveIntentClassification (ben-Beng)": 52.6, - "MassiveIntentClassification (cmo-Hant)": 62.06, - "MassiveIntentClassification (ron-Latn)": 62.45, - "MassiveIntentClassification (swe-Latn)": 67.73, - "MassiveIntentClassification (swa-Latn)": 50.1, - "MassiveScenarioClassification (cmo-Hant)": 67.7, - "MassiveScenarioClassification (kat-Geor)": 49.31, - "MassiveScenarioClassification (ind-Latn)": 72.36, - "MassiveScenarioClassification (amh-Ethi)": 42.0, - "MassiveScenarioClassification (ita-Latn)": 71.86, - "MassiveScenarioClassification (tur-Latn)": 68.71, - "MassiveScenarioClassification (tel-Telu)": 50.8, - "MassiveScenarioClassification (ell-Grek)": 67.42, - "MassiveScenarioClassification (deu-Latn)": 73.64, - "MassiveScenarioClassification (sqi-Latn)": 57.5, - "MassiveScenarioClassification (cym-Latn)": 57.36, - "MassiveScenarioClassification (spa-Latn)": 71.12, - "MassiveScenarioClassification (nld-Latn)": 72.47, - "MassiveScenarioClassification (swa-Latn)": 58.93, - "MassiveScenarioClassification (cmo-Hans)": 71.91, - "MassiveScenarioClassification (fin-Latn)": 65.91, - "MassiveScenarioClassification (por-Latn)": 70.99, - "MassiveScenarioClassification (hun-Latn)": 69.68, - "MassiveScenarioClassification 
(slv-Latn)": 70.25, - "MassiveScenarioClassification (urd-Arab)": 62.48, - "MassiveScenarioClassification (hye-Armn)": 49.32, - "MassiveScenarioClassification (pol-Latn)": 71.86, - "MassiveScenarioClassification (khm-Khmr)": 45.52, - "MassiveScenarioClassification (kan-Knda)": 49.51, - "MassiveScenarioClassification (hin-Deva)": 66.18, - "MassiveScenarioClassification (heb-Hebr)": 63.3, - "MassiveScenarioClassification (rus-Cyrl)": 73.87, - "MassiveScenarioClassification (mal-Mlym)": 48.53, - "MassiveScenarioClassification (afr-Latn)": 67.34, - "MassiveScenarioClassification (vie-Latn)": 69.19, - "MassiveScenarioClassification (fra-Latn)": 70.79, - "MassiveScenarioClassification (ben-Beng)": 58.75, - "MassiveScenarioClassification (lav-Latn)": 57.3, - "MassiveScenarioClassification (tam-Taml)": 50.9, - "MassiveScenarioClassification (en)": 73.87, - "MassiveScenarioClassification (aze-Latn)": 61.74, - "MassiveScenarioClassification (swe-Latn)": 73.24, - "MassiveScenarioClassification (kor-Kore)": 70.76, - "MassiveScenarioClassification (ron-Latn)": 68.54, - "MassiveScenarioClassification (msa-Latn)": 69.72, - "MassiveScenarioClassification (mya-Mymr)": 44.25, - "MassiveScenarioClassification (fas-Arab)": 70.5, - "MassiveScenarioClassification (tha-Thai)": 64.51, - "MassiveScenarioClassification (jpn-Jpan)": 72.81, - "MassiveScenarioClassification (nob-Latn)": 69.75, - "MassiveScenarioClassification (tgl-Latn)": 69.0, - "MassiveScenarioClassification (dan-Latn)": 71.51, - "MassiveScenarioClassification (ara-Arab)": 61.51, - "MassiveScenarioClassification (jav-Latn)": 58.24, - "MassiveScenarioClassification (isl-Latn)": 61.61, - "MassiveScenarioClassification (mon-Cyrl)": 46.6, - "MultilingualSentiment (cmn-Hans)": 68.13, - "NoRecClassification (nob-Latn)": 52.05, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 63.6, - "OnlineShopping (cmn-Hans)": 86.99, - "PAC (pol-Latn)": 68.09, - "PolEmo2.0-IN (pol-Latn)": 66.07, - "PolEmo2.0-OUT (pol-Latn)": 32.94, - "RuReviewsClassification (rus-Cyrl)": 61.42, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.93, - "RuSciBenchOECDClassification (rus-Cyrl)": 45.83, - "TNews (cmn-Hans)": 49.94, - "ToxicConversationsClassification": 63.9, - "TweetSentimentExtractionClassification": 57.14, - "Waimai (cmn-Hans)": 84.92 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "GritLM-7B", - "GeoreviewClusteringP2P (rus-Cyrl)": 74.06, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 45.2, - "MasakhaNEWSClusteringP2P (eng)": 70.5, - "MasakhaNEWSClusteringP2P (fra-Latn)": 73.54, - "MasakhaNEWSClusteringP2P (hau-Latn)": 51.33, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 66.75, - "MasakhaNEWSClusteringP2P (lin-Latn)": 59.57, - "MasakhaNEWSClusteringP2P (lug-Latn)": 58.93, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 54.38, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 92.67, - "MasakhaNEWSClusteringP2P (run-Latn)": 59.51, - "MasakhaNEWSClusteringP2P (sna-Latn)": 68.86, - "MasakhaNEWSClusteringP2P (som-Latn)": 41.42, - "MasakhaNEWSClusteringP2P (swa-Latn)": 33.61, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.68, - "MasakhaNEWSClusteringP2P (xho-Latn)": 46.65, - "MasakhaNEWSClusteringP2P (yor-Latn)": 52.39, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 43.39, - "MasakhaNEWSClusteringS2S (eng)": 65.85, - "MasakhaNEWSClusteringS2S (fra-Latn)": 68.87, - "MasakhaNEWSClusteringS2S (hau-Latn)": 33.02, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 64.55, - "MasakhaNEWSClusteringS2S (lin-Latn)": 72.01, - "MasakhaNEWSClusteringS2S (lug-Latn)": 47.42, - 
"MasakhaNEWSClusteringS2S (orm-Ethi)": 32.59, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 97.82, - "MasakhaNEWSClusteringS2S (run-Latn)": 59.41, - "MasakhaNEWSClusteringS2S (sna-Latn)": 71.58, - "MasakhaNEWSClusteringS2S (som-Latn)": 40.91, - "MasakhaNEWSClusteringS2S (swa-Latn)": 33.54, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 45.32, - "MasakhaNEWSClusteringS2S (xho-Latn)": 28.94, - "MasakhaNEWSClusteringS2S (yor-Latn)": 63.26, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 60.01, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 51.66 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "GritLM-7B", - "CDSC-E (pol-Latn)": 72.65, - "OpusparcusPC (deu-Latn)": 96.65, - "OpusparcusPC (en)": 98.57, - "OpusparcusPC (fin-Latn)": 90.41, - "OpusparcusPC (fra-Latn)": 93.41, - "OpusparcusPC (rus-Cyrl)": 88.63, - "OpusparcusPC (swe-Latn)": 94.04, - "PSC (pol-Latn)": 99.43, - "PawsXPairClassification (deu-Latn)": 58.5, - "PawsXPairClassification (en)": 63.78, - "PawsXPairClassification (spa-Latn)": 59.15, - "PawsXPairClassification (fra-Latn)": 61.89, - "PawsXPairClassification (jpn-Hira)": 51.46, - "PawsXPairClassification (kor-Hang)": 52.15, - "PawsXPairClassification (cmn-Hans)": 57.66, - "SICK-E-PL (pol-Latn)": 75.98, - "SprintDuplicateQuestions": 93.06, - "TERRa (rus-Cyrl)": 59.39, - "TwitterSemEval2015": 71.24, - "TwitterURLCorpus": 84.54 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "GritLM-7B", - "AlloprofReranking (fra-Latn)": 77.95, - "AskUbuntuDupQuestions": 61.11, - "MMarcoReranking (cmn-Hans)": 21.7, - "MindSmallReranking": 31.53, - "RuBQReranking (rus-Cyrl)": 72.41, - "SciDocsRR": 84.78, - "StackOverflowDupQuestions": 50.95, - "SyntecReranking (fra-Latn)": 83.32, - "T2Reranking (cmn-Hans)": 65.63 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "GritLM-7B", - "AILACasedocs": 35.31, - "AILAStatutes": 41.8, - "ARCChallenge": 26.68, - "AlloprofRetrieval (fra-Latn)": 55.42, - "AlphaNLI": 34.0, - "ArguAna": 63.17, - "ArguAna-PL (pol-Latn)": 48.89, - "BSARDRetrieval (fra-Latn)": 26.63, - "BrightRetrieval (pony)": 21.98, - "BrightRetrieval (robotics)": 17.31, - "BrightRetrieval (economics)": 19.0, - "BrightRetrieval (theoremqa_questions)": 23.34, - "BrightRetrieval (leetcode)": 29.85, - "BrightRetrieval (earth_science)": 32.77, - "BrightRetrieval (stackoverflow)": 11.62, - "BrightRetrieval (sustainable_living)": 18.04, - "BrightRetrieval (biology)": 25.04, - "BrightRetrieval (psychology)": 19.92, - "BrightRetrieval (theoremqa_theorems)": 17.41, - "BrightRetrieval (aops)": 8.91, - "CmedqaRetrieval (cmn-Hans)": 35.58, - "CovidRetrieval (cmn-Hans)": 73.47, - "DuRetrieval (cmn-Hans)": 88.18, - "EcomRetrieval (cmn-Hans)": 54.33, - "FiQA-PL (pol-Latn)": 38.04, - "FiQA2018": 59.91, - "GerDaLIRSmall (deu-Latn)": 20.61, - "HellaSwag": 39.45, - "LEMBNarrativeQARetrieval": 41.46, - "LEMBNeedleRetrieval": 33.25, - "LEMBPasskeyRetrieval": 38.25, - "LEMBQMSumRetrieval": 30.32, - "LEMBSummScreenFDRetrieval": 78.49, - "LEMBWikimQARetrieval": 60.8, - "LeCaRDv2 (zho-Hans)": 64.05, - "LegalBenchConsumerContractsQA": 82.1, - "LegalBenchCorporateLobbying": 95.0, - "LegalQuAD (deu-Latn)": 44.18, - "LegalSummarization": 70.64, - "MMarcoRetrieval (cmn-Hans)": 76.54, - "MedicalRetrieval (cmn-Hans)": 55.81, - "MintakaRetrieval (ara-Arab)": 25.88, - "MintakaRetrieval (deu-Latn)": 55.66, - "MintakaRetrieval (spa-Latn)": 53.36, - "MintakaRetrieval (fra-Latn)": 51.68, - "MintakaRetrieval (hin-Deva)": 26.06, - "MintakaRetrieval (ita-Latn)": 54.91, - "MintakaRetrieval (jpn-Hira)": 34.1, - 
"MintakaRetrieval (por-Latn)": 54.91, - "NFCorpus": 40.86, - "NFCorpus-PL (pol-Latn)": 32.88, - "PIQA": 44.35, - "Quail": 11.69, - "RARbCode": 84.0, - "RARbMath": 82.35, - "RuBQRetrieval (rus-Cyrl)": 70.94, - "SCIDOCS": 24.4, - "SCIDOCS-PL (pol-Latn)": 18.39, - "SIQA": 7.23, - "SciFact": 79.13, - "SciFact-PL (pol-Latn)": 73.22, - "SpartQA": 9.29, - "SyntecRetrieval (fra-Latn)": 89.48, - "T2Retrieval (cmn-Hans)": 82.96, - "TRECCOVID": 74.36, - "TRECCOVID-PL (pol-Latn)": 58.01, - "TempReasonL1": 7.15, - "TempReasonL2Fact": 58.38, - "TempReasonL2Pure": 11.22, - "TempReasonL3Fact": 44.29, - "TempReasonL3Pure": 14.15, - "Touche2020": 27.81, - "VideoRetrieval (cmn-Hans)": 53.85, - "WinoGrande": 53.74, - "XPQARetrieval (ara-Arab_ara-Arab)": 45.21, - "XPQARetrieval (eng-Latn_ara-Arab)": 27.32, - "XPQARetrieval (ara-Arab_eng-Latn)": 39.43, - "XPQARetrieval (deu-Latn_deu-Latn)": 76.58, - "XPQARetrieval (eng-Latn_deu-Latn)": 55.44, - "XPQARetrieval (deu-Latn_eng-Latn)": 72.56, - "XPQARetrieval (spa-Latn_spa-Latn)": 64.55, - "XPQARetrieval (eng-Latn_spa-Latn)": 45.49, - "XPQARetrieval (spa-Latn_eng-Latn)": 61.03, - "XPQARetrieval (fra-Latn_fra-Latn)": 70.85, - "XPQARetrieval (eng-Latn_fra-Latn)": 48.14, - "XPQARetrieval (fra-Latn_eng-Latn)": 66.96, - "XPQARetrieval (hin-Deva_hin-Deva)": 74.75, - "XPQARetrieval (eng-Latn_hin-Deva)": 25.61, - "XPQARetrieval (hin-Deva_eng-Latn)": 63.9, - "XPQARetrieval (ita-Latn_ita-Latn)": 76.53, - "XPQARetrieval (eng-Latn_ita-Latn)": 46.88, - "XPQARetrieval (ita-Latn_eng-Latn)": 71.03, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 72.27, - "XPQARetrieval (eng-Latn_jpn-Hira)": 41.94, - "XPQARetrieval (jpn-Hira_eng-Latn)": 69.42, - "XPQARetrieval (kor-Hang_kor-Hang)": 40.64, - "XPQARetrieval (eng-Latn_kor-Hang)": 32.68, - "XPQARetrieval (kor-Hang_eng-Latn)": 36.0, - "XPQARetrieval (pol-Latn_pol-Latn)": 50.74, - "XPQARetrieval (eng-Latn_pol-Latn)": 33.14, - "XPQARetrieval (pol-Latn_eng-Latn)": 48.06, - "XPQARetrieval (por-Latn_por-Latn)": 49.86, - "XPQARetrieval (eng-Latn_por-Latn)": 33.01, - "XPQARetrieval (por-Latn_eng-Latn)": 48.45, - "XPQARetrieval (tam-Taml_tam-Taml)": 41.78, - "XPQARetrieval (eng-Latn_tam-Taml)": 10.95, - "XPQARetrieval (tam-Taml_eng-Latn)": 21.28, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 65.29, - "XPQARetrieval (eng-Latn_cmn-Hans)": 35.86, - "XPQARetrieval (cmn-Hans_eng-Latn)": 58.12 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "GritLM-7B", - "AFQMC (cmn-Hans)": 32.65, - "ATEC (cmn-Hans)": 37.34, - "BIOSSES": 85.01, - "BQ (cmn-Hans)": 38.03, - "CDSC-R (pol-Latn)": 92.23, - "LCQMC (cmn-Hans)": 71.38, - "PAWSX (cmn-Hans)": 16.4, - "RUParaPhraserSTS (rus-Cyrl)": 71.08, - "RuSTSBenchmarkSTS (rus-Cyrl)": 76.26, - "SICK-R": 81.47, - "SICK-R-PL (pol-Latn)": 72.78, - "SICKFr (fra-Latn)": 76.91, - "STS12": 65.84, - "STS13": 78.37, - "STS14": 77.52, - "STS15": 85.43, - "STS16": 79.94, - "STS17 (ita-Latn_eng-Latn)": 88.42, - "STS17 (fra-Latn_eng-Latn)": 87.9, - "STS17 (kor-Hang)": 78.74, - "STS17 (en-en)": 90.12, - "STS17 (nld-Latn_eng-Latn)": 88.29, - "STS17 (ara-Arab)": 79.28, - "STS17 (eng-Latn_deu-Latn)": 88.92, - "STS17 (spa-Latn)": 87.12, - "STS17 (eng-Latn_tur-Latn)": 77.47, - "STS17 (spa-Latn_eng-Latn)": 87.47, - "STS17 (eng-Latn_ara-Arab)": 74.45, - "STS22 (spa-Latn_eng-Latn)": 80.76, - "STS22 (ara-Arab)": 55.45, - "STS22 (pol-Latn_eng-Latn)": 77.77, - "STS22 (deu-Latn_pol-Latn)": 55.09, - "STS22 (en)": 68.59, - "STS22 (rus-Cyrl)": 68.46, - "STS22 (deu-Latn_eng-Latn)": 62.33, - "STS22 (cmn-Hans)": 72.29, - "STS22 (pol-Latn)": 48.07, - "STS22 
(fra-Latn)": 83.09, - "STS22 (cmn-Hans_eng-Latn)": 72.73, - "STS22 (deu-Latn_fra-Latn)": 62.14, - "STS22 (spa-Latn_ita-Latn)": 77.63, - "STS22 (fra-Latn_pol-Latn)": 84.52, - "STS22 (ita-Latn)": 77.58, - "STS22 (spa-Latn)": 72.24, - "STS22 (deu-Latn)": 59.34, - "STS22 (tur-Latn)": 70.83, - "STSB (cmn-Hans)": 74.11, - "STSBenchmark": 83.1, - "STSBenchmarkMultilingualSTS (spa-Latn)": 79.51, - "STSBenchmarkMultilingualSTS (ita-Latn)": 76.24, - "STSBenchmarkMultilingualSTS (por-Latn)": 76.61, - "STSBenchmarkMultilingualSTS (fra-Latn)": 77.48, - "STSBenchmarkMultilingualSTS (deu-Latn)": 77.57, - "STSBenchmarkMultilingualSTS (en)": 83.12, - "STSBenchmarkMultilingualSTS (nld-Latn)": 74.83, - "STSBenchmarkMultilingualSTS (pol-Latn)": 74.67, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 75.27, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 76.19 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "GritLM-7B", - "SummEval": 30.26, - "SummEvalFr (fra-Latn)": 29.97 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "GritLM-7B", - "Core17InstructionRetrieval": 2.62, - "News21InstructionRetrieval": -1.01, - "Robust04InstructionRetrieval": -1.68 - } - ] - } - }, - "use-cmlm-multilingual": { - "BitextMining": { - "f1": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "use-cmlm-multilingual", - "BlurbsClusteringP2P": 29.63, - "BlurbsClusteringS2S": 15.24, - "TenKGnadClusteringP2P": 37.1, - "TenKGnadClusteringS2S": 25.64 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "use-cmlm-multilingual" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "use-cmlm-multilingual" - } - ] - } - }, - "LLM2Vec-Sheared-Llama-supervised": { - "BitextMining": { - "f1": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "AmazonCounterfactualClassification (en)": 77.42, - "AmazonPolarityClassification": 82.05, - "AmazonReviewsClassification (en)": 40.81, - "Banking77Classification": 86.01, - "EmotionClassification": 48.38, - "ImdbClassification": 75.33, - "MTOPDomainClassification (en)": 94.09, - "MTOPIntentClassification (en)": 77.05, - "MassiveIntentClassification (en)": 75.58, - "MassiveScenarioClassification (en)": 79.16, - "ToxicConversationsClassification": 69.92, - "TweetSentimentExtractionClassification": 60.76 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "ArxivClusteringP2P": 43.47, - "ArxivClusteringS2S": 39.85, - "BiorxivClusteringP2P": 37.1, - "BiorxivClusteringS2S": 34.28, - "MedrxivClusteringP2P": 33.55, - "MedrxivClusteringS2S": 31.11, - "RedditClustering": 53.02, - "RedditClusteringP2P": 60.47, - "StackExchangeClustering": 63.04, - "StackExchangeClusteringP2P": 34.01, - "TwentyNewsgroupsClustering": 49.37 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "SprintDuplicateQuestions": 96.25, - "TwitterSemEval2015": 76.14, - "TwitterURLCorpus": 86.23 - } - ] - }, - "Reranking": { 
- "map": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "AskUbuntuDupQuestions": 60.71, - "MindSmallReranking": 31.96, - "SciDocsRR": 79.23, - "StackOverflowDupQuestions": 49.61 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "ArguAna": 51.66, - "CQADupstackRetrieval": 41.73, - "ClimateFEVER": 33.49, - "DBPedia": 43.58, - "FEVER": 86.81, - "FiQA2018": 41.0, - "HotpotQA": 63.85, - "MSMARCO": 38.32, - "NFCorpus": 37.12, - "NQ": 53.89, - "QuoraRetrieval": 87.37, - "SCIDOCS": 17.96, - "SciFact": 72.08, - "TRECCOVID": 80.41, - "Touche2020": 22.31 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "BIOSSES": 85.88, - "SICK-R": 82.25, - "STS12": 78.28, - "STS13": 85.52, - "STS14": 82.49, - "STS15": 88.76, - "STS16": 87.11, - "STS17 (en-en)": 90.1, - "STS22 (en)": 68.25, - "STSBenchmark": 87.16 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised", - "SummEval": 30.01 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LLM2Vec-Sheared-Llama-supervised" - } - ] - } - }, - "text-search-curie-001": { - "BitextMining": { - "f1": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-search-curie-001", - "ArguAna": 46.98, - "ClimateFEVER": 19.4, - "FEVER": 75.6, - "FiQA2018": 45.21, - "HotpotQA": 64.8, - "NFCorpus": 38.01, - "QuoraRetrieval": 67.7, - "SCIDOCS": 17.74, - "SciFact": 74.35, - "TRECCOVID": 56.14, - "Touche2020": 30.9 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-search-curie-001" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-search-curie-001" - } - ] - } - }, - "text-embedding-ada-002-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "text-embedding-ada-002-instruct", - "ARCChallenge": 11.85, - "AlphaNLI": 10.62, - "HellaSwag": 24.8, - "PIQA": 23.87, - "Quail": 5.79, - "RARbCode": 82.36, - "RARbMath": 67.26, - "SIQA": 2.64, - "SpartQA": 4.75, - "TempReasonL1": 1.44, - "TempReasonL2Fact": 19.38, - "TempReasonL2Pure": 2.43, - "TempReasonL3Fact": 17.58, - "TempReasonL3Pure": 7.31, - "WinoGrande": 11.36 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "text-embedding-ada-002-instruct" - } - ] - } - }, - "voyage-large-2-instruct": { - "BitextMining": { - "f1": [ - { - "Model": "voyage-large-2-instruct" - } - ] - }, - 
"Classification": { - "accuracy": [ - { - "Model": "voyage-large-2-instruct", - "AmazonCounterfactualClassification (en)": 77.6, - "AmazonPolarityClassification": 96.58, - "AmazonReviewsClassification (en)": 50.77, - "Banking77Classification": 86.96, - "EmotionClassification": 59.81, - "ImdbClassification": 96.13, - "MTOPDomainClassification (en)": 98.86, - "MTOPIntentClassification (en)": 86.97, - "MassiveIntentClassification (en)": 81.08, - "MassiveScenarioClassification (en)": 87.95, - "ToxicConversationsClassification": 83.58, - "TweetSentimentExtractionClassification": 71.55 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "voyage-large-2-instruct", - "ArxivClusteringP2P": 51.81, - "ArxivClusteringS2S": 44.73, - "BiorxivClusteringP2P": 46.07, - "BiorxivClusteringS2S": 40.64, - "MedrxivClusteringP2P": 42.94, - "MedrxivClusteringS2S": 41.44, - "RedditClustering": 68.5, - "RedditClusteringP2P": 64.86, - "StackExchangeClustering": 74.16, - "StackExchangeClusteringP2P": 45.1, - "TwentyNewsgroupsClustering": 66.62 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "voyage-large-2-instruct", - "SprintDuplicateQuestions": 94.5, - "TwitterSemEval2015": 86.32, - "TwitterURLCorpus": 86.9 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "voyage-large-2-instruct", - "AskUbuntuDupQuestions": 64.92, - "MindSmallReranking": 30.97, - "SciDocsRR": 89.34, - "StackOverflowDupQuestions": 55.11 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "voyage-large-2-instruct", - "ArguAna": 64.06, - "BrightRetrieval (theoremqa_questions)": 26.06, - "BrightRetrieval (earth_science)": 25.09, - "BrightRetrieval (leetcode)": 30.6, - "BrightRetrieval (economics)": 19.85, - "BrightRetrieval (robotics)": 11.21, - "BrightRetrieval (psychology)": 24.79, - "BrightRetrieval (aops)": 7.45, - "BrightRetrieval (sustainable_living)": 15.58, - "BrightRetrieval (pony)": 1.48, - "BrightRetrieval (theoremqa_theorems)": 10.13, - "BrightRetrieval (biology)": 23.55, - "BrightRetrieval (stackoverflow)": 15.03, - "CQADupstackRetrieval": 46.6, - "ClimateFEVER": 32.65, - "DBPedia": 46.03, - "FEVER": 91.47, - "FiQA2018": 59.76, - "HotpotQA": 70.86, - "MSMARCO": 40.6, - "NFCorpus": 40.32, - "NQ": 65.92, - "QuoraRetrieval": 87.4, - "SCIDOCS": 24.32, - "SciFact": 79.99, - "TRECCOVID": 85.07, - "Touche2020": 39.16 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "voyage-large-2-instruct", - "BIOSSES": 89.24, - "SICK-R": 83.16, - "STS12": 73.34, - "STS13": 88.49, - "STS14": 86.49, - "STS15": 91.13, - "STS16": 85.68, - "STS17 (en-en)": 90.06, - "STS22 (en)": 66.32, - "STSBenchmark": 89.22 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "voyage-large-2-instruct", - "SummEval": 30.84 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "voyage-large-2-instruct" - } - ] - } - }, - "LaBSE-ru-turbo": { - "BitextMining": { - "f1": [ - { - "Model": "LaBSE-ru-turbo", - "Tatoeba (rus-Cyrl_eng-Latn)": 93.22 - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "LaBSE-ru-turbo", - "GeoreviewClassification (rus-Cyrl)": 46.04, - "HeadlineClassification (rus-Cyrl)": 69.98, - "InappropriatenessClassification (rus-Cyrl)": 61.39, - "KinopoiskClassification (rus-Cyrl)": 53.59, - "MassiveIntentClassification (rus-Cyrl)": 66.08, - "MassiveScenarioClassification (rus-Cyrl)": 71.13, - "RuReviewsClassification (rus-Cyrl)": 64.58, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.67, - "RuSciBenchOECDClassification (rus-Cyrl)": 43.58 - } - ] - }, - "Clustering": { - 
"v_measure": [ - { - "Model": "LaBSE-ru-turbo", - "GeoreviewClusteringP2P (rus-Cyrl)": 64.55, - "MLSUMClusteringP2P (rus-Cyrl)": 45.7, - "MLSUMClusteringS2S (rus-Cyrl)": 42.93, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.64, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.48 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "LaBSE-ru-turbo", - "OpusparcusPC (rus-Cyrl)": 89.32, - "TERRa (rus-Cyrl)": 57.81 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "LaBSE-ru-turbo", - "RuBQReranking (rus-Cyrl)": 68.65 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "LaBSE-ru-turbo", - "RiaNewsRetrieval (rus-Cyrl)": 69.36, - "RuBQRetrieval (rus-Cyrl)": 65.71 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "LaBSE-ru-turbo", - "RUParaPhraserSTS (rus-Cyrl)": 72.97, - "RuSTSBenchmarkSTS (rus-Cyrl)": 81.77, - "STS22 (rus-Cyrl)": 62.89, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "LaBSE-ru-turbo" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "LaBSE-ru-turbo" - } - ] - } - }, - "tart-full-flan-t5-xl": { - "BitextMining": { - "f1": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "tart-full-flan-t5-xl" - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "tart-full-flan-t5-xl", - "Core17InstructionRetrieval": 2.82, - "News21InstructionRetrieval": 1.99, - "Robust04InstructionRetrieval": -0.72 - } - ] - } - }, - "sentence-t5-xxl": { - "BitextMining": { - "f1": [ - { - "Model": "sentence-t5-xxl" - } - ] - }, - "Classification": { - "accuracy": [ - { - "Model": "sentence-t5-xxl", - "AmazonCounterfactualClassification (en)": 77.07, - "AmazonPolarityClassification": 92.79, - "AmazonReviewsClassification (en)": 48.93, - "AmazonReviewsClassification (fr)": 46.09, - "Banking77Classification": 82.31, - "EmotionClassification": 48.57, - "ImdbClassification": 90.23, - "MTOPDomainClassification (en)": 92.49, - "MTOPDomainClassification (fr)": 86.2, - "MTOPIntentClassification (en)": 68.33, - "MTOPIntentClassification (fr)": 58.33, - "MasakhaNEWSClassification (fra)": 79.1, - "MassiveIntentClassification (en)": 73.44, - "MassiveIntentClassification (fr)": 65.91, - "MassiveScenarioClassification (en)": 74.82, - "MassiveScenarioClassification (fr)": 68.53, - "ToxicConversationsClassification": 70.04, - "TweetSentimentExtractionClassification": 62.01 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "sentence-t5-xxl", - "AlloProfClusteringP2P": 60.98, - "AlloProfClusteringS2S": 43.5, - "ArxivClusteringP2P": 42.89, - "ArxivClusteringS2S": 33.47, - "BiorxivClusteringP2P": 36.53, - "BiorxivClusteringS2S": 28.66, - "BlurbsClusteringP2P": 39.91, - "BlurbsClusteringS2S": 15.94, - "HALClusteringS2S": 21.4, - "MLSUMClusteringP2P": 42.24, - "MLSUMClusteringS2S": 35.25, - "MasakhaNEWSClusteringP2P (fra)": 61.15, - "MasakhaNEWSClusteringS2S (fra)": 38.24, - "MedrxivClusteringP2P": 32.09, - "MedrxivClusteringS2S": 26.82, - 
"RedditClustering": 58.99, - "RedditClusteringP2P": 64.46, - "StackExchangeClustering": 70.78, - "StackExchangeClusteringP2P": 35.25, - "TenKGnadClusteringP2P": 43.43, - "TenKGnadClusteringS2S": 19.69, - "TwentyNewsgroupsClustering": 50.93 - } - ] - }, - "PairClassification": { - "ap": [ - { - "Model": "sentence-t5-xxl", - "OpusparcusPC (fr)": 93.94, - "PawsXPairClassification (fr)": 63.98, - "SprintDuplicateQuestions": 88.89, - "TwitterSemEval2015": 80.28, - "TwitterURLCorpus": 86.01 - } - ] - }, - "Reranking": { - "map": [ - { - "Model": "sentence-t5-xxl", - "AlloprofReranking": 68.36, - "AskUbuntuDupQuestions": 66.16, - "MindSmallReranking": 30.6, - "SciDocsRR": 76.09, - "StackOverflowDupQuestions": 52.85, - "SyntecReranking": 85.15 - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "sentence-t5-xxl", - "AlloprofRetrieval": 45.75, - "ArguAna": 39.85, - "BSARDRetrieval": 3.33, - "CQADupstackRetrieval": 44.65, - "ClimateFEVER": 14.63, - "DBPedia": 39.19, - "FEVER": 51.2, - "FiQA2018": 46.68, - "HotpotQA": 42.14, - "MSMARCO": 27.67, - "MintakaRetrieval (fr)": 34.93, - "NFCorpus": 35.08, - "NQ": 52.87, - "QuoraRetrieval": 85.96, - "SCIDOCS": 17.17, - "SciFact": 55.38, - "SyntecRetrieval": 78.97, - "TRECCOVID": 59.48, - "Touche2020": 21.65, - "XPQARetrieval (fr)": 56.2 - } - ] - }, - "STS": { - "spearman": [ - { - "Model": "sentence-t5-xxl", - "BIOSSES": 80.43, - "SICK-R": 80.47, - "SICKFr": 77.07, - "STS12": 78.85, - "STS13": 88.94, - "STS14": 84.86, - "STS15": 89.32, - "STS16": 84.67, - "STS17 (en-en)": 89.46, - "STS22 (en)": 65.33, - "STS22 (fr)": 76.8, - "STSBenchmark": 84.01, - "STSBenchmarkMultilingualSTS (fr)": 81.24 - } - ] - }, - "Summarization": { - "spearman": [ - { - "Model": "sentence-t5-xxl", - "SummEval": 30.08, - "SummEvalFr": 30.39 - } - ] - }, - "InstructionRetrieval": { - "p-MRR": [ - { - "Model": "sentence-t5-xxl" - } - ] - } - }, - "LaBSE": { - "BitextMining": { - "f1": [ - { - "Model": "LaBSE", - "BUCC (de-en)": 99.35, - "BUCC (fr-en)": 98.72, - "BUCC (ru-en)": 97.78, - "BUCC (zh-en)": 99.16, - "BornholmBitextMining (dan-Latn)": 45.63, - "Tatoeba (ber-Tfng_eng-Latn)": 8.4, - "Tatoeba (kab-Latn_eng-Latn)": 4.31, - "Tatoeba (tur-Latn_eng-Latn)": 98.0, - "Tatoeba (gle-Latn_eng-Latn)": 93.8, - "Tatoeba (awa-Deva_eng-Latn)": 71.7, - "Tatoeba (yue-Hant_eng-Latn)": 89.58, - "Tatoeba (tzl-Latn_eng-Latn)": 58.88, - "Tatoeba (tat-Cyrl_eng-Latn)": 85.92, - "Tatoeba (fin-Latn_eng-Latn)": 96.37, - "Tatoeba (cor-Latn_eng-Latn)": 10.11, - "Tatoeba (hye-Armn_eng-Latn)": 94.09, - "Tatoeba (ben-Beng_eng-Latn)": 88.55, - "Tatoeba (epo-Latn_eng-Latn)": 98.2, - "Tatoeba (ile-Latn_eng-Latn)": 85.58, - "Tatoeba (nld-Latn_eng-Latn)": 96.07, - "Tatoeba (mar-Deva_eng-Latn)": 92.65, - "Tatoeba (cmn-Hans_eng-Latn)": 95.1, - "Tatoeba (hin-Deva_eng-Latn)": 96.87, - "Tatoeba (tgl-Latn_eng-Latn)": 96.02, - "Tatoeba (mon-Cyrl_eng-Latn)": 95.91, - "Tatoeba (oci-Latn_eng-Latn)": 65.81, - "Tatoeba (dan-Latn_eng-Latn)": 95.71, - "Tatoeba (mkd-Cyrl_eng-Latn)": 93.6, - "Tatoeba (ces-Latn_eng-Latn)": 96.68, - "Tatoeba (fra-Latn_eng-Latn)": 94.86, - "Tatoeba (yid-Hebr_eng-Latn)": 88.79, - "Tatoeba (est-Latn_eng-Latn)": 96.55, - "Tatoeba (ast-Latn_eng-Latn)": 90.68, - "Tatoeba (ind-Latn_eng-Latn)": 93.66, - "Tatoeba (bre-Latn_eng-Latn)": 15.07, - "Tatoeba (eus-Latn_eng-Latn)": 95.01, - "Tatoeba (heb-Hebr_eng-Latn)": 91.53, - "Tatoeba (rus-Cyrl_eng-Latn)": 93.75, - "Tatoeba (lfn-Latn_eng-Latn)": 67.54, - "Tatoeba (jav-Latn_eng-Latn)": 79.77, - "Tatoeba (ukr-Cyrl_eng-Latn)": 93.97, - "Tatoeba 
(ell-Grek_eng-Latn)": 95.35, - "Tatoeba (nds-Latn_eng-Latn)": 79.42, - "Tatoeba (arz-Arab_eng-Latn)": 76.0, - "Tatoeba (gla-Latn_eng-Latn)": 85.66, - "Tatoeba (cbk-Latn_eng-Latn)": 79.44, - "Tatoeba (max-Deva_eng-Latn)": 63.26, - "Tatoeba (ron-Latn_eng-Latn)": 96.92, - "Tatoeba (ido-Latn_eng-Latn)": 89.42, - "Tatoeba (lvs-Latn_eng-Latn)": 95.88, - "Tatoeba (khm-Khmr_eng-Latn)": 78.37, - "Tatoeba (urd-Arab_eng-Latn)": 93.22, - "Tatoeba (glg-Latn_eng-Latn)": 96.82, - "Tatoeba (gsw-Latn_eng-Latn)": 46.5, - "Tatoeba (swe-Latn_eng-Latn)": 95.63, - "Tatoeba (swh-Latn_eng-Latn)": 84.5, - "Tatoeba (tha-Thai_eng-Latn)": 96.14, - "Tatoeba (tam-Taml_eng-Latn)": 89.0, - "Tatoeba (uzb-Latn_eng-Latn)": 84.23, - "Tatoeba (bul-Cyrl_eng-Latn)": 94.58, - "Tatoeba (kur-Latn_eng-Latn)": 83.59, - "Tatoeba (ina-Latn_eng-Latn)": 95.37, - "Tatoeba (nov-Latn_eng-Latn)": 74.38, - "Tatoeba (afr-Latn_eng-Latn)": 96.18, - "Tatoeba (csb-Latn_eng-Latn)": 52.57, - "Tatoeba (war-Latn_eng-Latn)": 60.29, - "Tatoeba (cha-Latn_eng-Latn)": 31.77, - "Tatoeba (pes-Arab_eng-Latn)": 94.7, - "Tatoeba (kat-Geor_eng-Latn)": 95.02, - "Tatoeba (bos-Latn_eng-Latn)": 94.92, - "Tatoeba (kor-Hang_eng-Latn)": 90.95, - "Tatoeba (slk-Latn_eng-Latn)": 96.5, - "Tatoeba (fry-Latn_eng-Latn)": 89.31, - "Tatoeba (ara-Arab_eng-Latn)": 88.8, - "Tatoeba (sqi-Latn_eng-Latn)": 96.76, - "Tatoeba (ita-Latn_eng-Latn)": 92.72, - "Tatoeba (lat-Latn_eng-Latn)": 80.07, - "Tatoeba (hsb-Latn_eng-Latn)": 67.11, - "Tatoeba (swg-Latn_eng-Latn)": 59.36, - "Tatoeba (srp-Cyrl_eng-Latn)": 94.43, - "Tatoeba (isl-Latn_eng-Latn)": 94.75, - "Tatoeba (hrv-Latn_eng-Latn)": 96.95, - "Tatoeba (wuu-Hans_eng-Latn)": 90.18, - "Tatoeba (mhr-Cyrl_eng-Latn)": 15.74, - "Tatoeba (vie-Latn_eng-Latn)": 97.2, - "Tatoeba (cym-Latn_eng-Latn)": 92.0, - "Tatoeba (dsb-Latn_eng-Latn)": 64.81, - "Tatoeba (hun-Latn_eng-Latn)": 96.55, - "Tatoeba (slv-Latn_eng-Latn)": 96.03, - "Tatoeba (orv-Cyrl_eng-Latn)": 38.93, - "Tatoeba (cat-Latn_eng-Latn)": 95.38, - "Tatoeba (dtp-Latn_eng-Latn)": 10.85, - "Tatoeba (por-Latn_eng-Latn)": 94.14, - "Tatoeba (jpn-Jpan_eng-Latn)": 95.38, - "Tatoeba (ang-Latn_eng-Latn)": 59.28, - "Tatoeba (aze-Latn_eng-Latn)": 94.93, - "Tatoeba (kzj-Latn_eng-Latn)": 11.33, - "Tatoeba (deu-Latn_eng-Latn)": 99.2, - "Tatoeba (uig-Arab_eng-Latn)": 92.4, - "Tatoeba (tel-Telu_eng-Latn)": 97.86, - "Tatoeba (tuk-Latn_eng-Latn)": 75.27, - "Tatoeba (nob-Latn_eng-Latn)": 98.4, - "Tatoeba (nno-Latn_eng-Latn)": 94.48, - "Tatoeba (spa-Latn_eng-Latn)": 98.4, - "Tatoeba (mal-Mlym_eng-Latn)": 98.45, - "Tatoeba (pam-Latn_eng-Latn)": 10.73, - "Tatoeba (xho-Latn_eng-Latn)": 91.55, - "Tatoeba (arq-Arab_eng-Latn)": 42.69, - "Tatoeba (kaz-Cyrl_eng-Latn)": 87.49, - "Tatoeba (bel-Cyrl_eng-Latn)": 95.0, - "Tatoeba (pol-Latn_eng-Latn)": 97.22, - "Tatoeba (fao-Latn_eng-Latn)": 87.4, - "Tatoeba (zsm-Latn_eng-Latn)": 95.62, - "Tatoeba (lit-Latn_eng-Latn)": 96.47, - "Tatoeba (ceb-Latn_eng-Latn)": 64.42, - "Tatoeba (pms-Latn_eng-Latn)": 64.57, - "Tatoeba (amh-Ethi_eng-Latn)": 91.47, - "Tatoeba (afr-eng)": 96.18, - "Tatoeba (amh-eng)": 91.47, - "Tatoeba (ang-eng)": 59.28, - "Tatoeba (ara-eng)": 88.8, - "Tatoeba (arq-eng)": 42.69, - "Tatoeba (arz-eng)": 76.0, - "Tatoeba (ast-eng)": 90.68, - "Tatoeba (awa-eng)": 71.7, - "Tatoeba (aze-eng)": 94.93, - "Tatoeba (bel-eng)": 95.0, - "Tatoeba (ben-eng)": 88.55, - "Tatoeba (ber-eng)": 8.4, - "Tatoeba (bos-eng)": 94.92, - "Tatoeba (bre-eng)": 15.07, - "Tatoeba (bul-eng)": 94.58, - "Tatoeba (cat-eng)": 95.38, - "Tatoeba (cbk-eng)": 79.44, - "Tatoeba (ceb-eng)": 64.42, - 
"Tatoeba (ces-eng)": 96.68, - "Tatoeba (cha-eng)": 31.77, - "Tatoeba (cmn-eng)": 95.1, - "Tatoeba (cor-eng)": 10.11, - "Tatoeba (csb-eng)": 52.57, - "Tatoeba (cym-eng)": 92.0, - "Tatoeba (dan-eng)": 95.71, - "Tatoeba (deu-eng)": 99.2, - "Tatoeba (dsb-eng)": 64.81, - "Tatoeba (dtp-eng)": 10.85, - "Tatoeba (ell-eng)": 95.35, - "Tatoeba (epo-eng)": 98.2, - "Tatoeba (est-eng)": 96.55, - "Tatoeba (eus-eng)": 95.01, - "Tatoeba (fao-eng)": 87.4, - "Tatoeba (fin-eng)": 96.37, - "Tatoeba (fra-eng)": 94.86, - "Tatoeba (fry-eng)": 89.31, - "Tatoeba (gla-eng)": 85.66, - "Tatoeba (gle-eng)": 93.8, - "Tatoeba (glg-eng)": 96.82, - "Tatoeba (gsw-eng)": 46.5, - "Tatoeba (heb-eng)": 91.53, - "Tatoeba (hin-eng)": 96.87, - "Tatoeba (hrv-eng)": 96.95, - "Tatoeba (hsb-eng)": 67.11, - "Tatoeba (hun-eng)": 96.55, - "Tatoeba (hye-eng)": 94.09, - "Tatoeba (ido-eng)": 89.42, - "Tatoeba (ile-eng)": 85.58, - "Tatoeba (ina-eng)": 95.37, - "Tatoeba (ind-eng)": 93.66, - "Tatoeba (isl-eng)": 94.75, - "Tatoeba (ita-eng)": 92.72, - "Tatoeba (jav-eng)": 79.77, - "Tatoeba (jpn-eng)": 95.38, - "Tatoeba (kab-eng)": 4.31, - "Tatoeba (kat-eng)": 95.02, - "Tatoeba (kaz-eng)": 87.49, - "Tatoeba (khm-eng)": 78.37, - "Tatoeba (kor-eng)": 90.95, - "Tatoeba (kur-eng)": 83.59, - "Tatoeba (kzj-eng)": 11.33, - "Tatoeba (lat-eng)": 80.07, - "Tatoeba (lfn-eng)": 67.54, - "Tatoeba (lit-eng)": 96.47, - "Tatoeba (lvs-eng)": 95.88, - "Tatoeba (mal-eng)": 98.45, - "Tatoeba (mar-eng)": 92.65, - "Tatoeba (max-eng)": 63.26, - "Tatoeba (mhr-eng)": 15.74, - "Tatoeba (mkd-eng)": 93.6, - "Tatoeba (mon-eng)": 95.91, - "Tatoeba (nds-eng)": 79.42, - "Tatoeba (nld-eng)": 96.07, - "Tatoeba (nno-eng)": 94.48, - "Tatoeba (nob-eng)": 98.4, - "Tatoeba (nov-eng)": 74.38, - "Tatoeba (oci-eng)": 65.81, - "Tatoeba (orv-eng)": 38.93, - "Tatoeba (pam-eng)": 10.73, - "Tatoeba (pes-eng)": 94.7, - "Tatoeba (pms-eng)": 64.57, - "Tatoeba (pol-eng)": 97.22, - "Tatoeba (por-eng)": 94.14, - "Tatoeba (ron-eng)": 96.92, - "Tatoeba (rus-eng)": 93.75, - "Tatoeba (slk-eng)": 96.5, - "Tatoeba (slv-eng)": 96.03, - "Tatoeba (spa-eng)": 98.4, - "Tatoeba (sqi-eng)": 96.76, - "Tatoeba (srp-eng)": 94.43, - "Tatoeba (swe-eng)": 95.63, - "Tatoeba (swg-eng)": 59.36, - "Tatoeba (swh-eng)": 84.5, - "Tatoeba (tam-eng)": 89.0, - "Tatoeba (tat-eng)": 85.92, - "Tatoeba (tel-eng)": 97.86, - "Tatoeba (tgl-eng)": 96.02, - "Tatoeba (tha-eng)": 96.14, - "Tatoeba (tuk-eng)": 75.27, - "Tatoeba (tur-eng)": 98.0, - "Tatoeba (tzl-eng)": 58.88, - "Tatoeba (uig-eng)": 92.4, - "Tatoeba (ukr-eng)": 93.97, - "Tatoeba (urd-eng)": 93.22, - "Tatoeba (uzb-eng)": 84.23, - "Tatoeba (vie-eng)": 97.2, - "Tatoeba (war-eng)": 60.29, - "Tatoeba (wuu-eng)": 90.18, - "Tatoeba (xho-eng)": 91.55, - "Tatoeba (yid-eng)": 88.79, - "Tatoeba (yue-eng)": 89.58, - "Tatoeba (zsm-eng)": 95.62 - } - ] - }, - "Classification": { - "accuracy": [ + "Classification": { + "accuracy": [ { "Model": "LaBSE", "AllegroReviews (pol-Latn)": 34.86, @@ -20404,3069 +8261,13509 @@ "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 35.39, "OnlineShopping (cmn-Hans)": 85.63, "PAC (pol-Latn)": 68.09, - "PAC": 68.11, - "PolEmo2.0-IN (pol-Latn)": 63.91, - "PolEmo2.0-IN": 64.0, - "PolEmo2.0-OUT (pol-Latn)": 44.76, - "PolEmo2.0-OUT": 44.72, - "RuReviewsClassification (rus-Cyrl)": 58.01, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.04, - "RuSciBenchOECDClassification (rus-Cyrl)": 40.48, - "TNews (cmn-Hans)": 46.02, - "ToxicConversationsClassification": 66.9, - "TweetSentimentExtractionClassification": 58.82, - "Waimai 
(cmn-Hans)": 82.85 + "PAC": 68.11, + "PolEmo2.0-IN (pol-Latn)": 63.91, + "PolEmo2.0-IN": 64.0, + "PolEmo2.0-OUT (pol-Latn)": 44.76, + "PolEmo2.0-OUT": 44.72, + "RuReviewsClassification (rus-Cyrl)": 58.01, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.04, + "RuSciBenchOECDClassification (rus-Cyrl)": 40.48, + "TNews (cmn-Hans)": 46.02, + "ToxicConversationsClassification": 66.9, + "TweetSentimentExtractionClassification": 58.82, + "Waimai (cmn-Hans)": 82.85 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LaBSE", + "8TagsClustering": 12.96, + "AlloProfClusteringP2P": 54.78, + "AlloProfClusteringS2S": 31.6, + "ArxivClusteringP2P": 32.13, + "ArxivClusteringS2S": 22.05, + "BiorxivClusteringP2P": 29.84, + "BiorxivClusteringS2S": 20.57, + "GeoreviewClusteringP2P (rus-Cyrl)": 52.19, + "HALClusteringS2S": 20.62, + "MLSUMClusteringP2P (rus-Cyrl)": 39.45, + "MLSUMClusteringP2P": 42.09, + "MLSUMClusteringS2S (rus-Cyrl)": 35.77, + "MLSUMClusteringS2S": 34.84, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.78, + "MasakhaNEWSClusteringP2P (eng)": 48.16, + "MasakhaNEWSClusteringP2P (fra-Latn)": 46.16, + "MasakhaNEWSClusteringP2P (hau-Latn)": 39.77, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 62.67, + "MasakhaNEWSClusteringP2P (lin-Latn)": 62.98, + "MasakhaNEWSClusteringP2P (lug-Latn)": 47.76, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.76, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 77.16, + "MasakhaNEWSClusteringP2P (run-Latn)": 60.36, + "MasakhaNEWSClusteringP2P (sna-Latn)": 63.57, + "MasakhaNEWSClusteringP2P (som-Latn)": 34.94, + "MasakhaNEWSClusteringP2P (swa-Latn)": 27.26, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.59, + "MasakhaNEWSClusteringP2P (xho-Latn)": 45.32, + "MasakhaNEWSClusteringP2P (yor-Latn)": 48.73, + "MasakhaNEWSClusteringP2P (fra)": 46.16, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 52.73, + "MasakhaNEWSClusteringS2S (eng)": 32.6, + "MasakhaNEWSClusteringS2S (fra-Latn)": 38.13, + "MasakhaNEWSClusteringS2S (hau-Latn)": 31.62, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 32.27, + "MasakhaNEWSClusteringS2S (lin-Latn)": 49.38, + "MasakhaNEWSClusteringS2S (lug-Latn)": 47.63, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 25.05, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.18, + "MasakhaNEWSClusteringS2S (run-Latn)": 52.39, + "MasakhaNEWSClusteringS2S (sna-Latn)": 46.9, + "MasakhaNEWSClusteringS2S (som-Latn)": 24.08, + "MasakhaNEWSClusteringS2S (swa-Latn)": 15.83, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 49.07, + "MasakhaNEWSClusteringS2S (xho-Latn)": 28.52, + "MasakhaNEWSClusteringS2S (yor-Latn)": 32.26, + "MasakhaNEWSClusteringS2S (fra)": 38.13, + "MedrxivClusteringP2P": 30.13, + "MedrxivClusteringS2S": 24.82, + "RedditClustering": 28.79, + "RedditClusteringP2P": 49.14, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 49.09, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.97, + "StackExchangeClustering": 35.43, + "StackExchangeClusteringP2P": 28.83, + "TwentyNewsgroupsClustering": 23.28 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "LaBSE", + "CDSC-E (pol-Latn)": 68.92, + "CDSC-E": 68.91, + "OpusparcusPC (deu-Latn)": 96.58, + "OpusparcusPC (en)": 98.12, + "OpusparcusPC (fin-Latn)": 94.44, + "OpusparcusPC (fra-Latn)": 93.96, + "OpusparcusPC (rus-Cyrl)": 87.3, + "OpusparcusPC (swe-Latn)": 93.69, + "OpusparcusPC (fr)": 93.96, + "PPC": 86.97, + "PSC (pol-Latn)": 97.42, + "PSC": 97.42, + "PawsXPairClassification (deu-Latn)": 51.07, + "PawsXPairClassification (en)": 54.07, + "PawsXPairClassification (spa-Latn)": 52.19, + "PawsXPairClassification (fra-Latn)": 54.63, + 
"PawsXPairClassification (jpn-Hira)": 47.56, + "PawsXPairClassification (kor-Hang)": 49.39, + "PawsXPairClassification (cmn-Hans)": 54.26, + "PawsXPairClassification (fr)": 54.63, + "SICK-E-PL (pol-Latn)": 63.77, + "SICK-E-PL": 63.77, + "SprintDuplicateQuestions": 89.26, + "TERRa (rus-Cyrl)": 55.71, + "TwitterSemEval2015": 62.78, + "TwitterURLCorpus": 84.58 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "LaBSE", + "AlloprofReranking (fra-Latn)": 55.37, + "AlloprofReranking": 49.51, + "AskUbuntuDupQuestions": 52.75, + "MMarcoReranking (cmn-Hans)": 14.83, + "MindSmallReranking": 29.81, + "RuBQReranking (rus-Cyrl)": 55.13, + "SciDocsRR": 68.72, + "StackOverflowDupQuestions": 42.42, + "SyntecReranking (fra-Latn)": 67.62, + "SyntecReranking": 73.28, + "T2Reranking (cmn-Hans)": 63.29 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "LaBSE", + "AILACasedocs": 17.67, + "AILAStatutes": 16.72, + "ARCChallenge": 3.78, + "AlloprofRetrieval (fra-Latn)": 19.77, + "AlloprofRetrieval": 19.77, + "AlphaNLI": 13.11, + "ArguAna": 34.18, + "ArguAna-PL (pol-Latn)": 38.56, + "ArguAna-PL": 38.52, + "BSARDRetrieval (fra-Latn)": 4.44, + "BSARDRetrieval": 0.0, + "CQADupstackRetrieval": 18.75, + "ClimateFEVER": 3.83, + "CmedqaRetrieval (cmn-Hans)": 5.49, + "CovidRetrieval (cmn-Hans)": 28.6, + "DBPedia": 15.57, + "DBPedia-PL": 16.1, + "DuRetrieval (cmn-Hans)": 26.34, + "EcomRetrieval (cmn-Hans)": 25.42, + "FEVER": 12.18, + "FiQA-PL (pol-Latn)": 7.66, + "FiQA-PL": 7.63, + "FiQA2018": 7.0, + "GerDaLIRSmall (deu-Latn)": 4.59, + "HellaSwag": 5.59, + "HotpotQA": 18.75, + "HotpotQA-PL": 19.72, + "LEMBNarrativeQARetrieval": 11.45, + "LEMBNeedleRetrieval": 17.5, + "LEMBPasskeyRetrieval": 20.25, + "LEMBQMSumRetrieval": 14.07, + "LEMBSummScreenFDRetrieval": 40.52, + "LEMBWikimQARetrieval": 28.1, + "LeCaRDv2 (zho-Hans)": 24.68, + "LegalBenchConsumerContractsQA": 54.66, + "LegalBenchCorporateLobbying": 69.39, + "LegalQuAD (deu-Latn)": 16.64, + "LegalSummarization": 53.89, + "MMarcoRetrieval (cmn-Hans)": 34.78, + "MSMARCO": 7.6, + "MSMARCO-PL": 7.22, + "MedicalRetrieval (cmn-Hans)": 6.68, + "MintakaRetrieval (ara-Arab)": 14.06, + "MintakaRetrieval (deu-Latn)": 15.26, + "MintakaRetrieval (spa-Latn)": 15.65, + "MintakaRetrieval (fra-Latn)": 15.53, + "MintakaRetrieval (hin-Deva)": 13.67, + "MintakaRetrieval (ita-Latn)": 15.94, + "MintakaRetrieval (jpn-Hira)": 12.8, + "MintakaRetrieval (por-Latn)": 15.03, + "MintakaRetrieval (fr)": 15.53, + "NFCorpus": 16.54, + "NFCorpus-PL (pol-Latn)": 17.45, + "NFCorpus-PL": 17.45, + "NQ": 8.42, + "NQ-PL": 9.65, + "PIQA": 6.53, + "Quail": 1.91, + "Quora-PL": 74.96, + "QuoraRetrieval": 77.03, + "RARbCode": 2.31, + "RARbMath": 27.19, + "RiaNewsRetrieval (rus-Cyrl)": 42.75, + "RuBQRetrieval (rus-Cyrl)": 30.02, + "SCIDOCS": 5.63, + "SCIDOCS-PL (pol-Latn)": 7.47, + "SCIDOCS-PL": 7.48, + "SIQA": 1.07, + "SciFact": 38.2, + "SciFact-PL (pol-Latn)": 39.79, + "SciFact-PL": 39.79, + "SpartQA": 1.56, + "SyntecRetrieval (fra-Latn)": 55.31, + "SyntecRetrieval": 55.31, + "T2Retrieval (cmn-Hans)": 25.32, + "TRECCOVID": 16.34, + "TRECCOVID-PL (pol-Latn)": 18.51, + "TRECCOVID-PL": 18.45, + "TempReasonL1": 1.56, + "TempReasonL2Fact": 7.06, + "TempReasonL2Pure": 0.14, + "TempReasonL3Fact": 8.74, + "TempReasonL3Pure": 4.73, + "Touche2020": 4.88, + "VideoRetrieval (cmn-Hans)": 22.04, + "WinoGrande": 54.3, + "XPQARetrieval (ara-Arab_ara-Arab)": 35.19, + "XPQARetrieval (eng-Latn_ara-Arab)": 20.64, + "XPQARetrieval (ara-Arab_eng-Latn)": 32.47, + "XPQARetrieval (deu-Latn_deu-Latn)": 53.56, + 
"XPQARetrieval (eng-Latn_deu-Latn)": 24.31, + "XPQARetrieval (deu-Latn_eng-Latn)": 54.87, + "XPQARetrieval (spa-Latn_spa-Latn)": 44.49, + "XPQARetrieval (eng-Latn_spa-Latn)": 25.31, + "XPQARetrieval (spa-Latn_eng-Latn)": 43.4, + "XPQARetrieval (fra-Latn_fra-Latn)": 51.74, + "XPQARetrieval (eng-Latn_fra-Latn)": 21.29, + "XPQARetrieval (fra-Latn_eng-Latn)": 49.4, + "XPQARetrieval (hin-Deva_hin-Deva)": 66.64, + "XPQARetrieval (eng-Latn_hin-Deva)": 23.25, + "XPQARetrieval (hin-Deva_eng-Latn)": 64.54, + "XPQARetrieval (ita-Latn_ita-Latn)": 56.27, + "XPQARetrieval (eng-Latn_ita-Latn)": 25.8, + "XPQARetrieval (ita-Latn_eng-Latn)": 52.69, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 58.6, + "XPQARetrieval (eng-Latn_jpn-Hira)": 21.49, + "XPQARetrieval (jpn-Hira_eng-Latn)": 52.41, + "XPQARetrieval (kor-Hang_kor-Hang)": 27.66, + "XPQARetrieval (eng-Latn_kor-Hang)": 23.33, + "XPQARetrieval (kor-Hang_eng-Latn)": 23.96, + "XPQARetrieval (pol-Latn_pol-Latn)": 37.33, + "XPQARetrieval (eng-Latn_pol-Latn)": 16.19, + "XPQARetrieval (pol-Latn_eng-Latn)": 37.7, + "XPQARetrieval (por-Latn_por-Latn)": 38.49, + "XPQARetrieval (eng-Latn_por-Latn)": 19.41, + "XPQARetrieval (por-Latn_eng-Latn)": 37.33, + "XPQARetrieval (tam-Taml_tam-Taml)": 37.32, + "XPQARetrieval (eng-Latn_tam-Taml)": 20.53, + "XPQARetrieval (tam-Taml_eng-Latn)": 30.14, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 50.7, + "XPQARetrieval (eng-Latn_cmn-Hans)": 20.59, + "XPQARetrieval (cmn-Hans_eng-Latn)": 48.23, + "XPQARetrieval (fr)": 51.74 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "LaBSE", + "AFQMC (cmn-Hans)": 21.02, + "ATEC (cmn-Hans)": 26.61, + "BIOSSES": 78.7, + "BQ (cmn-Hans)": 42.6, + "CDSC-R (pol-Latn)": 85.53, + "CDSC-R": 85.53, + "LCQMC (cmn-Hans)": 52.19, + "PAWSX (cmn-Hans)": 10.23, + "RUParaPhraserSTS (rus-Cyrl)": 65.74, + "RuSTSBenchmarkSTS (rus-Cyrl)": 73.34, + "SICK-R": 69.99, + "SICK-R-PL (pol-Latn)": 65.9, + "SICK-R-PL": 65.9, + "SICKFr (fra-Latn)": 69.94, + "SICKFr": 69.94, + "STS12": 65.08, + "STS13": 67.98, + "STS14": 64.03, + "STS15": 76.59, + "STS16": 72.98, + "STS17 (nld-Latn_eng-Latn)": 75.22, + "STS17 (eng-Latn_tur-Latn)": 72.07, + "STS17 (spa-Latn)": 80.83, + "STS17 (kor-Hang)": 71.32, + "STS17 (eng-Latn_deu-Latn)": 73.85, + "STS17 (ita-Latn_eng-Latn)": 76.99, + "STS17 (eng-Latn_ara-Arab)": 74.51, + "STS17 (ara-Arab)": 69.07, + "STS17 (fra-Latn_eng-Latn)": 76.98, + "STS17 (spa-Latn_eng-Latn)": 65.71, + "STS17 (en-en)": 79.45, + "STS17 (ar-ar)": 69.07, + "STS17 (en-ar)": 74.51, + "STS17 (en-de)": 73.85, + "STS17 (en-tr)": 72.07, + "STS17 (es-en)": 65.71, + "STS17 (es-es)": 80.83, + "STS17 (fr-en)": 76.98, + "STS17 (it-en)": 76.99, + "STS17 (ko-ko)": 71.32, + "STS17 (nl-en)": 75.22, + "STS22 (cmn-Hans)": 63.02, + "STS22 (spa-Latn)": 63.18, + "STS22 (en)": 60.97, + "STS22 (spa-Latn_ita-Latn)": 69.69, + "STS22 (deu-Latn)": 48.58, + "STS22 (fra-Latn)": 77.95, + "STS22 (ara-Arab)": 57.67, + "STS22 (spa-Latn_eng-Latn)": 71.86, + "STS22 (pol-Latn_eng-Latn)": 69.41, + "STS22 (ita-Latn)": 72.22, + "STS22 (pol-Latn)": 39.3, + "STS22 (deu-Latn_fra-Latn)": 53.28, + "STS22 (deu-Latn_pol-Latn)": 58.69, + "STS22 (fra-Latn_pol-Latn)": 61.98, + "STS22 (cmn-Hans_eng-Latn)": 64.02, + "STS22 (tur-Latn)": 58.15, + "STS22 (deu-Latn_eng-Latn)": 50.14, + "STS22 (rus-Cyrl)": 57.49, + "STS22 (ar)": 57.67, + "STS22 (de)": 48.58, + "STS22 (de-en)": 50.14, + "STS22 (de-fr)": 53.28, + "STS22 (de-pl)": 58.69, + "STS22 (es)": 63.18, + "STS22 (es-en)": 71.86, + "STS22 (es-it)": 69.69, + "STS22 (fr)": 77.95, + "STS22 (fr-pl)": 61.98, + "STS22 (it)": 72.22, 
+ "STS22 (pl)": 39.28, + "STS22 (pl-en)": 69.41, + "STS22 (ru)": 57.49, + "STS22 (tr)": 58.15, + "STS22 (zh)": 63.02, + "STS22 (zh-en)": 64.02, + "STSB (cmn-Hans)": 68.38, + "STSBenchmark": 72.25, + "STSBenchmarkMultilingualSTS (en)": 72.25, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.06, + "STSBenchmarkMultilingualSTS (fra-Latn)": 75.1, + "STSBenchmarkMultilingualSTS (spa-Latn)": 72.92, + "STSBenchmarkMultilingualSTS (nld-Latn)": 70.22, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 69.5, + "STSBenchmarkMultilingualSTS (ita-Latn)": 72.97, + "STSBenchmarkMultilingualSTS (por-Latn)": 71.65, + "STSBenchmarkMultilingualSTS (deu-Latn)": 72.43, + "STSBenchmarkMultilingualSTS (pol-Latn)": 72.58, + "STSBenchmarkMultilingualSTS (fr)": 75.1 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LaBSE", + "SummEval": 31.05, + "SummEvalFr (fra-Latn)": 30.16, + "SummEvalFr": 30.16 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "LaBSE" + } + ] + } + }, + "paraphrase-multilingual-MiniLM-L12-v2": { + "BitextMining": { + "f1": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "BUCC (de-en)": 97.11, + "BUCC (fr-en)": 94.99, + "BUCC (ru-en)": 95.06, + "BUCC (zh-en)": 95.63, + "BornholmBitextMining (dan-Latn)": 19.67, + "Tatoeba (nds-Latn_eng-Latn)": 32.16, + "Tatoeba (dtp-Latn_eng-Latn)": 5.69, + "Tatoeba (fry-Latn_eng-Latn)": 31.13, + "Tatoeba (rus-Cyrl_eng-Latn)": 91.87, + "Tatoeba (cmn-Hans_eng-Latn)": 94.93, + "Tatoeba (afr-Latn_eng-Latn)": 58.22, + "Tatoeba (ast-Latn_eng-Latn)": 62.17, + "Tatoeba (kab-Latn_eng-Latn)": 1.16, + "Tatoeba (cor-Latn_eng-Latn)": 3.42, + "Tatoeba (est-Latn_eng-Latn)": 97.33, + "Tatoeba (cym-Latn_eng-Latn)": 13.25, + "Tatoeba (dsb-Latn_eng-Latn)": 33.43, + "Tatoeba (oci-Latn_eng-Latn)": 38.57, + "Tatoeba (zsm-Latn_eng-Latn)": 95.31, + "Tatoeba (yid-Hebr_eng-Latn)": 14.38, + "Tatoeba (bel-Cyrl_eng-Latn)": 67.73, + "Tatoeba (gle-Latn_eng-Latn)": 11.62, + "Tatoeba (slv-Latn_eng-Latn)": 96.92, + "Tatoeba (lvs-Latn_eng-Latn)": 97.87, + "Tatoeba (orv-Cyrl_eng-Latn)": 15.1, + "Tatoeba (bul-Cyrl_eng-Latn)": 92.65, + "Tatoeba (tgl-Latn_eng-Latn)": 13.09, + "Tatoeba (ind-Latn_eng-Latn)": 92.74, + "Tatoeba (mon-Cyrl_eng-Latn)": 95.04, + "Tatoeba (fao-Latn_eng-Latn)": 27.51, + "Tatoeba (fin-Latn_eng-Latn)": 93.1, + "Tatoeba (srp-Cyrl_eng-Latn)": 92.24, + "Tatoeba (bos-Latn_eng-Latn)": 93.27, + "Tatoeba (kor-Hang_eng-Latn)": 92.52, + "Tatoeba (cat-Latn_eng-Latn)": 94.42, + "Tatoeba (por-Latn_eng-Latn)": 92.13, + "Tatoeba (spa-Latn_eng-Latn)": 95.42, + "Tatoeba (ukr-Cyrl_eng-Latn)": 92.82, + "Tatoeba (war-Latn_eng-Latn)": 7.25, + "Tatoeba (hsb-Latn_eng-Latn)": 36.1, + "Tatoeba (dan-Latn_eng-Latn)": 94.8, + "Tatoeba (nov-Latn_eng-Latn)": 47.99, + "Tatoeba (kat-Geor_eng-Latn)": 95.44, + "Tatoeba (gla-Latn_eng-Latn)": 3.61, + "Tatoeba (ron-Latn_eng-Latn)": 95.3, + "Tatoeba (glg-Latn_eng-Latn)": 94.0, + "Tatoeba (vie-Latn_eng-Latn)": 95.12, + "Tatoeba (pol-Latn_eng-Latn)": 94.28, + "Tatoeba (hrv-Latn_eng-Latn)": 95.98, + "Tatoeba (fra-Latn_eng-Latn)": 91.72, + "Tatoeba (hye-Armn_eng-Latn)": 93.28, + "Tatoeba (ile-Latn_eng-Latn)": 57.71, + "Tatoeba (arz-Arab_eng-Latn)": 51.26, + "Tatoeba (nob-Latn_eng-Latn)": 97.73, + "Tatoeba (amh-Ethi_eng-Latn)": 36.21, + "Tatoeba (nld-Latn_eng-Latn)": 94.58, + "Tatoeba (swg-Latn_eng-Latn)": 26.31, + "Tatoeba (cha-Latn_eng-Latn)": 15.98, + "Tatoeba (nno-Latn_eng-Latn)": 76.34, + "Tatoeba (mal-Mlym_eng-Latn)": 32.2, + "Tatoeba (urd-Arab_eng-Latn)": 94.57, + "Tatoeba (uzb-Latn_eng-Latn)": 17.14, + "Tatoeba 
(swe-Latn_eng-Latn)": 94.42, + "Tatoeba (wuu-Hans_eng-Latn)": 76.0, + "Tatoeba (ceb-Latn_eng-Latn)": 8.05, + "Tatoeba (hin-Deva_eng-Latn)": 97.62, + "Tatoeba (ces-Latn_eng-Latn)": 95.12, + "Tatoeba (arq-Arab_eng-Latn)": 18.6, + "Tatoeba (jav-Latn_eng-Latn)": 17.04, + "Tatoeba (swh-Latn_eng-Latn)": 14.48, + "Tatoeba (kzj-Latn_eng-Latn)": 6.24, + "Tatoeba (jpn-Jpan_eng-Latn)": 90.41, + "Tatoeba (xho-Latn_eng-Latn)": 4.52, + "Tatoeba (csb-Latn_eng-Latn)": 21.56, + "Tatoeba (max-Deva_eng-Latn)": 45.25, + "Tatoeba (ben-Beng_eng-Latn)": 36.48, + "Tatoeba (ara-Arab_eng-Latn)": 87.93, + "Tatoeba (kur-Latn_eng-Latn)": 46.94, + "Tatoeba (lit-Latn_eng-Latn)": 93.16, + "Tatoeba (isl-Latn_eng-Latn)": 24.07, + "Tatoeba (cbk-Latn_eng-Latn)": 55.37, + "Tatoeba (uig-Arab_eng-Latn)": 24.39, + "Tatoeba (mhr-Cyrl_eng-Latn)": 6.89, + "Tatoeba (slk-Latn_eng-Latn)": 95.15, + "Tatoeba (tha-Thai_eng-Latn)": 96.72, + "Tatoeba (ell-Grek_eng-Latn)": 95.43, + "Tatoeba (pam-Latn_eng-Latn)": 5.41, + "Tatoeba (pes-Arab_eng-Latn)": 92.59, + "Tatoeba (yue-Hant_eng-Latn)": 71.45, + "Tatoeba (tur-Latn_eng-Latn)": 95.08, + "Tatoeba (tel-Telu_eng-Latn)": 36.4, + "Tatoeba (eus-Latn_eng-Latn)": 23.18, + "Tatoeba (ina-Latn_eng-Latn)": 79.13, + "Tatoeba (aze-Latn_eng-Latn)": 62.1, + "Tatoeba (lfn-Latn_eng-Latn)": 47.02, + "Tatoeba (heb-Hebr_eng-Latn)": 86.88, + "Tatoeba (mar-Deva_eng-Latn)": 92.38, + "Tatoeba (sqi-Latn_eng-Latn)": 98.17, + "Tatoeba (tat-Cyrl_eng-Latn)": 10.25, + "Tatoeba (lat-Latn_eng-Latn)": 19.47, + "Tatoeba (tzl-Latn_eng-Latn)": 25.46, + "Tatoeba (tuk-Latn_eng-Latn)": 15.16, + "Tatoeba (ang-Latn_eng-Latn)": 10.24, + "Tatoeba (bre-Latn_eng-Latn)": 5.56, + "Tatoeba (ber-Tfng_eng-Latn)": 4.43, + "Tatoeba (gsw-Latn_eng-Latn)": 25.74, + "Tatoeba (ita-Latn_eng-Latn)": 93.05, + "Tatoeba (awa-Deva_eng-Latn)": 33.43, + "Tatoeba (tam-Taml_eng-Latn)": 24.64, + "Tatoeba (mkd-Cyrl_eng-Latn)": 91.0, + "Tatoeba (hun-Latn_eng-Latn)": 91.58, + "Tatoeba (pms-Latn_eng-Latn)": 30.7, + "Tatoeba (epo-Latn_eng-Latn)": 41.73, + "Tatoeba (ido-Latn_eng-Latn)": 40.25, + "Tatoeba (khm-Khmr_eng-Latn)": 32.11, + "Tatoeba (kaz-Cyrl_eng-Latn)": 34.89, + "Tatoeba (deu-Latn_eng-Latn)": 97.02, + "Tatoeba (afr-eng)": 58.22, + "Tatoeba (amh-eng)": 36.21, + "Tatoeba (ang-eng)": 10.24, + "Tatoeba (ara-eng)": 87.93, + "Tatoeba (arq-eng)": 18.6, + "Tatoeba (arz-eng)": 51.26, + "Tatoeba (ast-eng)": 62.17, + "Tatoeba (awa-eng)": 33.43, + "Tatoeba (aze-eng)": 62.1, + "Tatoeba (bel-eng)": 67.73, + "Tatoeba (ben-eng)": 36.48, + "Tatoeba (ber-eng)": 4.43, + "Tatoeba (bos-eng)": 93.27, + "Tatoeba (bre-eng)": 5.56, + "Tatoeba (bul-eng)": 92.65, + "Tatoeba (cat-eng)": 94.42, + "Tatoeba (cbk-eng)": 55.37, + "Tatoeba (ceb-eng)": 8.05, + "Tatoeba (ces-eng)": 95.12, + "Tatoeba (cha-eng)": 15.98, + "Tatoeba (cmn-eng)": 94.93, + "Tatoeba (cor-eng)": 3.42, + "Tatoeba (csb-eng)": 21.56, + "Tatoeba (cym-eng)": 13.25, + "Tatoeba (dan-eng)": 94.8, + "Tatoeba (deu-eng)": 97.02, + "Tatoeba (dsb-eng)": 33.43, + "Tatoeba (dtp-eng)": 5.69, + "Tatoeba (ell-eng)": 95.43, + "Tatoeba (epo-eng)": 41.73, + "Tatoeba (est-eng)": 97.33, + "Tatoeba (eus-eng)": 23.18, + "Tatoeba (fao-eng)": 27.51, + "Tatoeba (fin-eng)": 93.1, + "Tatoeba (fra-eng)": 91.72, + "Tatoeba (fry-eng)": 31.13, + "Tatoeba (gla-eng)": 3.61, + "Tatoeba (gle-eng)": 11.62, + "Tatoeba (glg-eng)": 94.0, + "Tatoeba (gsw-eng)": 25.74, + "Tatoeba (heb-eng)": 86.88, + "Tatoeba (hin-eng)": 97.62, + "Tatoeba (hrv-eng)": 95.98, + "Tatoeba (hsb-eng)": 36.1, + "Tatoeba (hun-eng)": 91.58, + "Tatoeba (hye-eng)": 93.28, + "Tatoeba 
(ido-eng)": 40.25, + "Tatoeba (ile-eng)": 57.71, + "Tatoeba (ina-eng)": 79.13, + "Tatoeba (ind-eng)": 92.74, + "Tatoeba (isl-eng)": 24.07, + "Tatoeba (ita-eng)": 93.05, + "Tatoeba (jav-eng)": 17.04, + "Tatoeba (jpn-eng)": 90.41, + "Tatoeba (kab-eng)": 1.16, + "Tatoeba (kat-eng)": 95.44, + "Tatoeba (kaz-eng)": 34.89, + "Tatoeba (khm-eng)": 32.11, + "Tatoeba (kor-eng)": 92.52, + "Tatoeba (kur-eng)": 46.94, + "Tatoeba (kzj-eng)": 6.24, + "Tatoeba (lat-eng)": 19.47, + "Tatoeba (lfn-eng)": 47.02, + "Tatoeba (lit-eng)": 93.16, + "Tatoeba (lvs-eng)": 97.87, + "Tatoeba (mal-eng)": 32.2, + "Tatoeba (mar-eng)": 92.38, + "Tatoeba (max-eng)": 45.25, + "Tatoeba (mhr-eng)": 6.89, + "Tatoeba (mkd-eng)": 91.0, + "Tatoeba (mon-eng)": 95.04, + "Tatoeba (nds-eng)": 32.16, + "Tatoeba (nld-eng)": 94.58, + "Tatoeba (nno-eng)": 76.34, + "Tatoeba (nob-eng)": 97.73, + "Tatoeba (nov-eng)": 47.99, + "Tatoeba (oci-eng)": 38.57, + "Tatoeba (orv-eng)": 15.1, + "Tatoeba (pam-eng)": 5.41, + "Tatoeba (pes-eng)": 92.59, + "Tatoeba (pms-eng)": 30.7, + "Tatoeba (pol-eng)": 94.28, + "Tatoeba (por-eng)": 92.13, + "Tatoeba (ron-eng)": 95.3, + "Tatoeba (rus-eng)": 91.87, + "Tatoeba (slk-eng)": 95.15, + "Tatoeba (slv-eng)": 96.92, + "Tatoeba (spa-eng)": 95.42, + "Tatoeba (sqi-eng)": 98.17, + "Tatoeba (srp-eng)": 92.24, + "Tatoeba (swe-eng)": 94.42, + "Tatoeba (swg-eng)": 26.31, + "Tatoeba (swh-eng)": 14.48, + "Tatoeba (tam-eng)": 24.64, + "Tatoeba (tat-eng)": 10.25, + "Tatoeba (tel-eng)": 36.4, + "Tatoeba (tgl-eng)": 13.09, + "Tatoeba (tha-eng)": 96.72, + "Tatoeba (tuk-eng)": 15.16, + "Tatoeba (tur-eng)": 95.08, + "Tatoeba (tzl-eng)": 25.46, + "Tatoeba (uig-eng)": 24.39, + "Tatoeba (ukr-eng)": 92.82, + "Tatoeba (urd-eng)": 94.57, + "Tatoeba (uzb-eng)": 17.14, + "Tatoeba (vie-eng)": 95.12, + "Tatoeba (war-eng)": 7.25, + "Tatoeba (wuu-eng)": 76.0, + "Tatoeba (xho-eng)": 4.52, + "Tatoeba (yid-eng)": 14.38, + "Tatoeba (yue-eng)": 71.45, + "Tatoeba (zsm-eng)": 95.31 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "AllegroReviews (pol-Latn)": 30.85, + "AllegroReviews": 30.88, + "AmazonCounterfactualClassification (en-ext)": 69.99, + "AmazonCounterfactualClassification (en)": 71.57, + "AmazonCounterfactualClassification (deu-Latn)": 68.36, + "AmazonCounterfactualClassification (jpn-Jpan)": 63.37, + "AmazonCounterfactualClassification (de)": 68.35, + "AmazonCounterfactualClassification (ja)": 63.45, + "AmazonPolarityClassification": 69.21, + "AmazonReviewsClassification (en)": 35.11, + "AmazonReviewsClassification (deu-Latn)": 35.91, + "AmazonReviewsClassification (spa-Latn)": 37.49, + "AmazonReviewsClassification (fra-Latn)": 35.29, + "AmazonReviewsClassification (jpn-Jpan)": 33.21, + "AmazonReviewsClassification (cmn-Hans)": 35.24, + "AmazonReviewsClassification (de)": 35.91, + "AmazonReviewsClassification (es)": 37.49, + "AmazonReviewsClassification (fr)": 35.3, + "AmazonReviewsClassification (ja)": 33.24, + "AmazonReviewsClassification (zh)": 35.26, + "AngryTweetsClassification (dan-Latn)": 50.9, + "Banking77Classification": 79.77, + "CBD (pol-Latn)": 57.71, + "CBD": 57.68, + "DanishPoliticalCommentsClassification (dan-Latn)": 37.58, + "EmotionClassification": 42.37, + "GeoreviewClassification (rus-Cyrl)": 38.24, + "HeadlineClassification (rus-Cyrl)": 68.3, + "IFlyTek (cmn-Hans)": 39.88, + "ImdbClassification": 60.46, + "InappropriatenessClassification (rus-Cyrl)": 58.18, + "JDReview (cmn-Hans)": 70.26, + "KinopoiskClassification (rus-Cyrl)": 41.45, + 
"LccSentimentClassification (dan-Latn)": 54.53, + "MTOPDomainClassification (en)": 87.06, + "MTOPDomainClassification (deu-Latn)": 79.21, + "MTOPDomainClassification (spa-Latn)": 83.06, + "MTOPDomainClassification (fra-Latn)": 78.64, + "MTOPDomainClassification (hin-Deva)": 81.36, + "MTOPDomainClassification (tha-Thai)": 79.97, + "MTOPDomainClassification (de)": 79.2, + "MTOPDomainClassification (es)": 83.04, + "MTOPDomainClassification (fr)": 78.63, + "MTOPDomainClassification (hi)": 81.36, + "MTOPDomainClassification (th)": 79.99, + "MTOPIntentClassification (en)": 65.52, + "MTOPIntentClassification (deu-Latn)": 54.21, + "MTOPIntentClassification (spa-Latn)": 60.3, + "MTOPIntentClassification (fra-Latn)": 54.01, + "MTOPIntentClassification (hin-Deva)": 59.92, + "MTOPIntentClassification (tha-Thai)": 61.97, + "MTOPIntentClassification (de)": 54.23, + "MTOPIntentClassification (es)": 60.28, + "MTOPIntentClassification (fr)": 54.05, + "MTOPIntentClassification (hi)": 59.9, + "MTOPIntentClassification (th)": 61.96, + "MasakhaNEWSClassification (amh-Ethi)": 64.28, + "MasakhaNEWSClassification (eng)": 74.7, + "MasakhaNEWSClassification (fra-Latn)": 71.68, + "MasakhaNEWSClassification (hau-Latn)": 47.96, + "MasakhaNEWSClassification (ibo-Latn)": 42.46, + "MasakhaNEWSClassification (lin-Latn)": 59.26, + "MasakhaNEWSClassification (lug-Latn)": 42.29, + "MasakhaNEWSClassification (orm-Ethi)": 34.98, + "MasakhaNEWSClassification (pcm-Latn)": 89.54, + "MasakhaNEWSClassification (run-Latn)": 47.2, + "MasakhaNEWSClassification (sna-Latn)": 57.56, + "MasakhaNEWSClassification (som-Latn)": 34.8, + "MasakhaNEWSClassification (swa-Latn)": 46.05, + "MasakhaNEWSClassification (tir-Ethi)": 27.94, + "MasakhaNEWSClassification (xho-Latn)": 44.81, + "MasakhaNEWSClassification (yor-Latn)": 52.92, + "MasakhaNEWSClassification (fra)": 76.09, + "MassiveIntentClassification (en)": 66.89, + "MassiveIntentClassification (kat-Geor)": 43.03, + "MassiveIntentClassification (vie-Latn)": 56.62, + "MassiveIntentClassification (tur-Latn)": 59.91, + "MassiveIntentClassification (deu-Latn)": 50.71, + "MassiveIntentClassification (isl-Latn)": 30.87, + "MassiveIntentClassification (tam-Taml)": 36.82, + "MassiveIntentClassification (kan-Knda)": 41.0, + "MassiveIntentClassification (mon-Cyrl)": 51.77, + "MassiveIntentClassification (pol-Latn)": 59.48, + "MassiveIntentClassification (spa-Latn)": 59.7, + "MassiveIntentClassification (ben-Beng)": 35.38, + "MassiveIntentClassification (por-Latn)": 61.29, + "MassiveIntentClassification (amh-Ethi)": 36.77, + "MassiveIntentClassification (cym-Latn)": 26.13, + "MassiveIntentClassification (ind-Latn)": 59.9, + "MassiveIntentClassification (ron-Latn)": 58.44, + "MassiveIntentClassification (cmo-Hant)": 58.74, + "MassiveIntentClassification (dan-Latn)": 57.75, + "MassiveIntentClassification (swe-Latn)": 59.43, + "MassiveIntentClassification (ara-Arab)": 45.15, + "MassiveIntentClassification (ita-Latn)": 59.66, + "MassiveIntentClassification (jpn-Jpan)": 60.9, + "MassiveIntentClassification (swa-Latn)": 29.56, + "MassiveIntentClassification (cmo-Hans)": 62.0, + "MassiveIntentClassification (aze-Latn)": 47.43, + "MassiveIntentClassification (hin-Deva)": 58.37, + "MassiveIntentClassification (fra-Latn)": 60.24, + "MassiveIntentClassification (hun-Latn)": 60.44, + "MassiveIntentClassification (jav-Latn)": 32.37, + "MassiveIntentClassification (slv-Latn)": 57.34, + "MassiveIntentClassification (ell-Grek)": 58.7, + "MassiveIntentClassification (hye-Armn)": 51.6, + "MassiveIntentClassification 
(nob-Latn)": 55.52, + "MassiveIntentClassification (rus-Cyrl)": 59.06, + "MassiveIntentClassification (fas-Arab)": 61.03, + "MassiveIntentClassification (mal-Mlym)": 42.44, + "MassiveIntentClassification (tha-Thai)": 58.92, + "MassiveIntentClassification (afr-Latn)": 45.87, + "MassiveIntentClassification (tel-Telu)": 40.77, + "MassiveIntentClassification (urd-Arab)": 52.79, + "MassiveIntentClassification (tgl-Latn)": 33.67, + "MassiveIntentClassification (nld-Latn)": 59.52, + "MassiveIntentClassification (fin-Latn)": 57.56, + "MassiveIntentClassification (lav-Latn)": 54.72, + "MassiveIntentClassification (sqi-Latn)": 56.6, + "MassiveIntentClassification (khm-Khmr)": 40.04, + "MassiveIntentClassification (msa-Latn)": 54.81, + "MassiveIntentClassification (heb-Hebr)": 52.55, + "MassiveIntentClassification (mya-Mymr)": 52.03, + "MassiveIntentClassification (kor-Kore)": 50.36, + "MassiveIntentClassification (pl)": 59.43, + "MassiveIntentClassification (fr)": 57.52, + "MassiveScenarioClassification (khm-Khmr)": 46.95, + "MassiveScenarioClassification (kan-Knda)": 45.72, + "MassiveScenarioClassification (isl-Latn)": 37.55, + "MassiveScenarioClassification (nob-Latn)": 64.25, + "MassiveScenarioClassification (swe-Latn)": 67.14, + "MassiveScenarioClassification (nld-Latn)": 65.53, + "MassiveScenarioClassification (slv-Latn)": 64.01, + "MassiveScenarioClassification (jpn-Jpan)": 66.49, + "MassiveScenarioClassification (spa-Latn)": 65.07, + "MassiveScenarioClassification (kor-Kore)": 55.71, + "MassiveScenarioClassification (fas-Arab)": 65.89, + "MassiveScenarioClassification (jav-Latn)": 38.62, + "MassiveScenarioClassification (aze-Latn)": 52.09, + "MassiveScenarioClassification (kat-Geor)": 50.66, + "MassiveScenarioClassification (rus-Cyrl)": 65.25, + "MassiveScenarioClassification (fra-Latn)": 66.09, + "MassiveScenarioClassification (fin-Latn)": 63.74, + "MassiveScenarioClassification (dan-Latn)": 66.87, + "MassiveScenarioClassification (ben-Beng)": 41.19, + "MassiveScenarioClassification (tur-Latn)": 66.53, + "MassiveScenarioClassification (ind-Latn)": 66.17, + "MassiveScenarioClassification (por-Latn)": 65.83, + "MassiveScenarioClassification (cym-Latn)": 31.71, + "MassiveScenarioClassification (pol-Latn)": 65.04, + "MassiveScenarioClassification (sqi-Latn)": 64.34, + "MassiveScenarioClassification (mal-Mlym)": 47.73, + "MassiveScenarioClassification (tel-Telu)": 46.49, + "MassiveScenarioClassification (en)": 71.54, + "MassiveScenarioClassification (ell-Grek)": 66.14, + "MassiveScenarioClassification (tha-Thai)": 67.05, + "MassiveScenarioClassification (tgl-Latn)": 37.39, + "MassiveScenarioClassification (msa-Latn)": 61.73, + "MassiveScenarioClassification (ara-Arab)": 51.71, + "MassiveScenarioClassification (heb-Hebr)": 59.22, + "MassiveScenarioClassification (deu-Latn)": 57.4, + "MassiveScenarioClassification (mya-Mymr)": 59.09, + "MassiveScenarioClassification (ron-Latn)": 64.2, + "MassiveScenarioClassification (hin-Deva)": 65.23, + "MassiveScenarioClassification (hun-Latn)": 66.57, + "MassiveScenarioClassification (afr-Latn)": 53.63, + "MassiveScenarioClassification (tam-Taml)": 42.63, + "MassiveScenarioClassification (hye-Armn)": 56.11, + "MassiveScenarioClassification (vie-Latn)": 60.73, + "MassiveScenarioClassification (lav-Latn)": 59.82, + "MassiveScenarioClassification (mon-Cyrl)": 57.07, + "MassiveScenarioClassification (urd-Arab)": 60.41, + "MassiveScenarioClassification (cmo-Hans)": 67.45, + "MassiveScenarioClassification (swa-Latn)": 34.86, + "MassiveScenarioClassification 
(amh-Ethi)": 41.89, + "MassiveScenarioClassification (ita-Latn)": 65.01, + "MassiveScenarioClassification (cmo-Hant)": 65.72, + "MassiveScenarioClassification (pl)": 65.04, + "MassiveScenarioClassification (fr)": 64.52, + "MultilingualSentiment (cmn-Hans)": 61.9, + "NoRecClassification (nob-Latn)": 46.7, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 42.52, + "OnlineShopping (cmn-Hans)": 84.89, + "PAC (pol-Latn)": 65.75, + "PAC": 65.76, + "PolEmo2.0-IN (pol-Latn)": 57.76, + "PolEmo2.0-IN": 57.76, + "PolEmo2.0-OUT (pol-Latn)": 28.66, + "PolEmo2.0-OUT": 28.7, + "RuReviewsClassification (rus-Cyrl)": 58.88, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.19, + "RuSciBenchOECDClassification (rus-Cyrl)": 41.41, + "TNews (cmn-Hans)": 39.19, + "ToxicConversationsClassification": 66.07, + "TweetSentimentExtractionClassification": 56.12, + "Waimai (cmn-Hans)": 82.27 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "8TagsClustering": 23.24, + "AlloProfClusteringP2P": 56.06, + "AlloProfClusteringS2S": 42.16, + "ArxivClusteringP2P": 38.33, + "ArxivClusteringS2S": 31.55, + "BiorxivClusteringP2P": 33.49, + "BiorxivClusteringS2S": 29.44, + "BlurbsClusteringP2P": 32.46, + "BlurbsClusteringS2S": 14.33, + "GeoreviewClusteringP2P (rus-Cyrl)": 53.35, + "HALClusteringS2S": 23.21, + "MLSUMClusteringP2P (rus-Cyrl)": 37.0, + "MLSUMClusteringP2P": 39.97, + "MLSUMClusteringS2S (rus-Cyrl)": 38.16, + "MLSUMClusteringS2S": 36.55, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.36, + "MasakhaNEWSClusteringP2P (eng)": 49.96, + "MasakhaNEWSClusteringP2P (fra-Latn)": 40.85, + "MasakhaNEWSClusteringP2P (hau-Latn)": 19.39, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 33.81, + "MasakhaNEWSClusteringP2P (lin-Latn)": 51.98, + "MasakhaNEWSClusteringP2P (lug-Latn)": 41.88, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 22.23, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.64, + "MasakhaNEWSClusteringP2P (run-Latn)": 48.03, + "MasakhaNEWSClusteringP2P (sna-Latn)": 44.62, + "MasakhaNEWSClusteringP2P (som-Latn)": 27.54, + "MasakhaNEWSClusteringP2P (swa-Latn)": 22.69, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.02, + "MasakhaNEWSClusteringP2P (xho-Latn)": 27.68, + "MasakhaNEWSClusteringP2P (yor-Latn)": 27.29, + "MasakhaNEWSClusteringP2P (fra)": 36.58, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 42.28, + "MasakhaNEWSClusteringS2S (eng)": 25.74, + "MasakhaNEWSClusteringS2S (fra-Latn)": 36.5, + "MasakhaNEWSClusteringS2S (hau-Latn)": 9.2, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 33.37, + "MasakhaNEWSClusteringS2S (lin-Latn)": 47.76, + "MasakhaNEWSClusteringS2S (lug-Latn)": 45.15, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 22.08, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 58.42, + "MasakhaNEWSClusteringS2S (run-Latn)": 47.41, + "MasakhaNEWSClusteringS2S (sna-Latn)": 43.0, + "MasakhaNEWSClusteringS2S (som-Latn)": 26.22, + "MasakhaNEWSClusteringS2S (swa-Latn)": 13.53, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.4, + "MasakhaNEWSClusteringS2S (xho-Latn)": 21.03, + "MasakhaNEWSClusteringS2S (yor-Latn)": 27.04, + "MasakhaNEWSClusteringS2S (fra)": 33.9, + "MedrxivClusteringP2P": 31.52, + "MedrxivClusteringS2S": 30.87, + "RedditClustering": 42.02, + "RedditClusteringP2P": 50.73, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.22, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.68, + "StackExchangeClustering": 49.6, + "StackExchangeClusteringP2P": 31.69, + "TenKGnadClusteringP2P": 36.13, + "TenKGnadClusteringS2S": 22.26, + "TwentyNewsgroupsClustering": 39.28 + } + ] + }, + 
"PairClassification": { + "ap": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "CDSC-E (pol-Latn)": 72.22, + "CDSC-E": 72.22, + "OpusparcusPC (deu-Latn)": 96.63, + "OpusparcusPC (en)": 98.59, + "OpusparcusPC (fin-Latn)": 93.2, + "OpusparcusPC (fra-Latn)": 92.01, + "OpusparcusPC (rus-Cyrl)": 88.25, + "OpusparcusPC (swe-Latn)": 93.99, + "OpusparcusPC (fr)": 92.01, + "PPC": 91.8, + "PSC (pol-Latn)": 97.14, + "PSC": 97.14, + "PawsXPairClassification (deu-Latn)": 53.26, + "PawsXPairClassification (en)": 55.94, + "PawsXPairClassification (spa-Latn)": 54.61, + "PawsXPairClassification (fra-Latn)": 56.94, + "PawsXPairClassification (jpn-Hira)": 48.66, + "PawsXPairClassification (kor-Hang)": 49.69, + "PawsXPairClassification (cmn-Hans)": 54.3, + "PawsXPairClassification (fr)": 56.94, + "SICK-E-PL (pol-Latn)": 71.94, + "SICK-E-PL": 71.94, + "SprintDuplicateQuestions": 89.46, + "TERRa (rus-Cyrl)": 58.56, + "TwitterSemEval2015": 62.06, + "TwitterURLCorpus": 83.83 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "AlloprofReranking (fra-Latn)": 62.42, + "AlloprofReranking": 49.01, + "AskUbuntuDupQuestions": 60.49, + "MMarcoReranking (cmn-Hans)": 16.14, + "MindSmallReranking": 30.37, + "RuBQReranking (rus-Cyrl)": 52.8, + "SciDocsRR": 77.78, + "StackOverflowDupQuestions": 45.85, + "SyntecReranking (fra-Latn)": 72.5, + "SyntecReranking": 75.03, + "T2Reranking (cmn-Hans)": 65.28 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "AILACasedocs": 13.66, + "AILAStatutes": 20.52, + "ARCChallenge": 6.19, + "AlloprofRetrieval (fra-Latn)": 26.63, + "AlloprofRetrieval": 26.63, + "AlphaNLI": 20.89, + "ArguAna": 44.88, + "ArguAna-PL (pol-Latn)": 37.86, + "ArguAna-PL": 37.83, + "BSARDRetrieval (fra-Latn)": 9.6, + "BSARDRetrieval": 0.0, + "CQADupstackRetrieval": 30.7, + "ClimateFEVER": 18.49, + "CmedqaRetrieval (cmn-Hans)": 10.78, + "CovidRetrieval (cmn-Hans)": 30.11, + "DBPedia": 22.63, + "DBPedia-PL": 18.0, + "DuRetrieval (cmn-Hans)": 34.72, + "EcomRetrieval (cmn-Hans)": 13.32, + "FEVER": 52.66, + "FiQA-PL (pol-Latn)": 12.49, + "FiQA-PL": 12.49, + "FiQA2018": 20.33, + "GerDaLIRSmall (deu-Latn)": 2.62, + "HellaSwag": 16.98, + "HotpotQA": 30.01, + "HotpotQA-PL": 22.76, + "LEMBNarrativeQARetrieval": 13.82, + "LEMBNeedleRetrieval": 13.5, + "LEMBPasskeyRetrieval": 8.25, + "LEMBQMSumRetrieval": 11.02, + "LEMBSummScreenFDRetrieval": 38.12, + "LEMBWikimQARetrieval": 40.84, + "LeCaRDv2 (zho-Hans)": 32.03, + "LegalBenchConsumerContractsQA": 49.81, + "LegalBenchCorporateLobbying": 88.51, + "LegalQuAD (deu-Latn)": 13.31, + "LegalSummarization": 54.97, + "MMarcoRetrieval (cmn-Hans)": 46.62, + "MSMARCO": 23.72, + "MSMARCO-PL": 10.39, + "MedicalRetrieval (cmn-Hans)": 15.46, + "MintakaRetrieval (ara-Arab)": 12.61, + "MintakaRetrieval (deu-Latn)": 21.77, + "MintakaRetrieval (spa-Latn)": 21.59, + "MintakaRetrieval (fra-Latn)": 21.53, + "MintakaRetrieval (hin-Deva)": 16.76, + "MintakaRetrieval (ita-Latn)": 22.23, + "MintakaRetrieval (jpn-Hira)": 14.33, + "MintakaRetrieval (por-Latn)": 22.52, + "MintakaRetrieval (fr)": 21.53, + "NFCorpus": 23.45, + "NFCorpus-PL (pol-Latn)": 17.17, + "NFCorpus-PL": 17.16, + "NQ": 29.8, + "NQ-PL": 12.56, + "PIQA": 15.79, + "Quail": 2.96, + "Quora-PL": 77.18, + "QuoraRetrieval": 86.55, + "RARbCode": 8.48, + "RARbMath": 30.02, + "RiaNewsRetrieval (rus-Cyrl)": 44.82, + "RuBQRetrieval (rus-Cyrl)": 29.7, + "SCIDOCS": 0.03, + "SCIDOCS-PL (pol-Latn)": 10.26, + "SCIDOCS-PL": 10.26, + "SIQA": 0.88, 
+ "SciFact": 48.37, + "SciFact-PL (pol-Latn)": 40.24, + "SciFact-PL": 40.24, + "SpartQA": 4.94, + "SyntecRetrieval (fra-Latn)": 65.54, + "SyntecRetrieval": 65.54, + "T2Retrieval (cmn-Hans)": 30.31, + "TRECCOVID": 39.12, + "TRECCOVID-PL (pol-Latn)": 34.23, + "TRECCOVID-PL": 34.38, + "TempReasonL1": 1.43, + "TempReasonL2Fact": 6.21, + "TempReasonL2Pure": 0.22, + "TempReasonL3Fact": 6.77, + "TempReasonL3Pure": 4.9, + "Touche2020": 16.06, + "VideoRetrieval (cmn-Hans)": 14.71, + "WinoGrande": 46.52, + "XPQARetrieval (ara-Arab_ara-Arab)": 22.97, + "XPQARetrieval (eng-Latn_ara-Arab)": 17.17, + "XPQARetrieval (ara-Arab_eng-Latn)": 25.5, + "XPQARetrieval (deu-Latn_deu-Latn)": 42.62, + "XPQARetrieval (eng-Latn_deu-Latn)": 26.52, + "XPQARetrieval (deu-Latn_eng-Latn)": 48.73, + "XPQARetrieval (spa-Latn_spa-Latn)": 38.24, + "XPQARetrieval (eng-Latn_spa-Latn)": 26.09, + "XPQARetrieval (spa-Latn_eng-Latn)": 41.51, + "XPQARetrieval (fra-Latn_fra-Latn)": 42.51, + "XPQARetrieval (eng-Latn_fra-Latn)": 26.09, + "XPQARetrieval (fra-Latn_eng-Latn)": 43.08, + "XPQARetrieval (hin-Deva_hin-Deva)": 52.09, + "XPQARetrieval (eng-Latn_hin-Deva)": 24.08, + "XPQARetrieval (hin-Deva_eng-Latn)": 49.11, + "XPQARetrieval (ita-Latn_ita-Latn)": 51.63, + "XPQARetrieval (eng-Latn_ita-Latn)": 29.34, + "XPQARetrieval (ita-Latn_eng-Latn)": 46.53, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 51.57, + "XPQARetrieval (eng-Latn_jpn-Hira)": 23.87, + "XPQARetrieval (jpn-Hira_eng-Latn)": 44.93, + "XPQARetrieval (kor-Hang_kor-Hang)": 21.34, + "XPQARetrieval (eng-Latn_kor-Hang)": 21.51, + "XPQARetrieval (kor-Hang_eng-Latn)": 22.59, + "XPQARetrieval (pol-Latn_pol-Latn)": 28.45, + "XPQARetrieval (eng-Latn_pol-Latn)": 17.08, + "XPQARetrieval (pol-Latn_eng-Latn)": 26.57, + "XPQARetrieval (por-Latn_por-Latn)": 32.33, + "XPQARetrieval (eng-Latn_por-Latn)": 19.76, + "XPQARetrieval (por-Latn_eng-Latn)": 34.2, + "XPQARetrieval (tam-Taml_tam-Taml)": 6.36, + "XPQARetrieval (eng-Latn_tam-Taml)": 5.36, + "XPQARetrieval (tam-Taml_eng-Latn)": 9.03, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 44.16, + "XPQARetrieval (eng-Latn_cmn-Hans)": 19.03, + "XPQARetrieval (cmn-Hans_eng-Latn)": 40.08, + "XPQARetrieval (fr)": 42.51 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "AFQMC (cmn-Hans)": 14.3, + "ATEC (cmn-Hans)": 18.42, + "BIOSSES": 74.18, + "BQ (cmn-Hans)": 38.53, + "CDSC-R (pol-Latn)": 88.98, + "CDSC-R": 88.98, + "LCQMC (cmn-Hans)": 63.96, + "PAWSX (cmn-Hans)": 10.13, + "RUParaPhraserSTS (rus-Cyrl)": 61.87, + "RuSTSBenchmarkSTS (rus-Cyrl)": 79.55, + "SICK-R": 79.61, + "SICK-R-PL (pol-Latn)": 68.77, + "SICK-R-PL": 68.77, + "SICKFr (fra-Latn)": 75.1, + "SICKFr": 75.1, + "STS12": 76.02, + "STS13": 80.7, + "STS14": 78.85, + "STS15": 85.84, + "STS16": 81.05, + "STS17 (fra-Latn_eng-Latn)": 76.59, + "STS17 (nld-Latn_eng-Latn)": 81.71, + "STS17 (ita-Latn_eng-Latn)": 82.35, + "STS17 (kor-Hang)": 77.03, + "STS17 (ara-Arab)": 79.16, + "STS17 (eng-Latn_ara-Arab)": 81.22, + "STS17 (spa-Latn_eng-Latn)": 84.44, + "STS17 (spa-Latn)": 85.56, + "STS17 (eng-Latn_deu-Latn)": 84.22, + "STS17 (en-en)": 86.87, + "STS17 (eng-Latn_tur-Latn)": 76.74, + "STS17 (ar-ar)": 79.16, + "STS17 (en-ar)": 81.22, + "STS17 (en-de)": 84.22, + "STS17 (en-tr)": 76.74, + "STS17 (es-en)": 84.44, + "STS17 (es-es)": 85.56, + "STS17 (fr-en)": 76.59, + "STS17 (it-en)": 82.35, + "STS17 (ko-ko)": 77.03, + "STS17 (nl-en)": 81.71, + "STS22 (ara-Arab)": 46.2, + "STS22 (spa-Latn_eng-Latn)": 67.33, + "STS22 (cmn-Hans)": 58.75, + "STS22 (fra-Latn)": 70.55, + "STS22 
(en)": 62.07, + "STS22 (deu-Latn)": 44.64, + "STS22 (pol-Latn)": 33.74, + "STS22 (rus-Cyrl)": 57.08, + "STS22 (pol-Latn_eng-Latn)": 69.02, + "STS22 (deu-Latn_eng-Latn)": 52.65, + "STS22 (cmn-Hans_eng-Latn)": 65.71, + "STS22 (tur-Latn)": 53.39, + "STS22 (spa-Latn)": 56.56, + "STS22 (deu-Latn_pol-Latn)": 44.22, + "STS22 (spa-Latn_ita-Latn)": 47.67, + "STS22 (deu-Latn_fra-Latn)": 51.73, + "STS22 (fra-Latn_pol-Latn)": 50.71, + "STS22 (ita-Latn)": 55.22, + "STS22 (pl)": 33.73, + "STS22 (fr)": 70.55, + "STSB (cmn-Hans)": 78.91, + "STSBenchmark": 84.42, + "STSBenchmarkMultilingualSTS (spa-Latn)": 81.1, + "STSBenchmarkMultilingualSTS (fra-Latn)": 79.9, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 80.47, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.32, + "STSBenchmarkMultilingualSTS (ita-Latn)": 80.39, + "STSBenchmarkMultilingualSTS (pol-Latn)": 78.29, + "STSBenchmarkMultilingualSTS (por-Latn)": 80.16, + "STSBenchmarkMultilingualSTS (deu-Latn)": 78.87, + "STSBenchmarkMultilingualSTS (nld-Latn)": 79.54, + "STSBenchmarkMultilingualSTS (en)": 84.42, + "STSBenchmarkMultilingualSTS (fr)": 79.9 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2", + "SummEval": 30.67, + "SummEvalFr (fra-Latn)": 29.2, + "SummEvalFr": 29.2 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "paraphrase-multilingual-MiniLM-L12-v2" + } + ] + } + }, + "mistral-7b-instruct-v0.2": { + "BitextMining": { + "f1": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "mistral-7b-instruct-v0.2" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "mistral-7b-instruct-v0.2", + "Core17InstructionRetrieval": 13.03, + "News21InstructionRetrieval": 4.81, + "Robust04InstructionRetrieval": 12.61 + } + ] + } + }, + "st-polish-paraphrase-from-distilroberta": { + "BitextMining": { + "f1": [ + { + "Model": "st-polish-paraphrase-from-distilroberta" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "st-polish-paraphrase-from-distilroberta", + "AllegroReviews": 34.5, + "CBD": 70.27, + "MassiveIntentClassification (pl)": 64.81, + "MassiveScenarioClassification (pl)": 70.01, + "PAC": 64.6, + "PolEmo2.0-IN": 67.06, + "PolEmo2.0-OUT": 38.58 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "st-polish-paraphrase-from-distilroberta", + "8TagsClustering": 31.68 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "st-polish-paraphrase-from-distilroberta", + "CDSC-E": 75.99, + "PPC": 93.29, + "PSC": 99.1, + "SICK-E-PL": 79.63 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "st-polish-paraphrase-from-distilroberta" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "st-polish-paraphrase-from-distilroberta", + "ArguAna-PL": 49.42, + "DBPedia-PL": 19.82, + "FiQA-PL": 19.58, + "HotpotQA-PL": 23.47, + "MSMARCO-PL": 16.51, + "NFCorpus-PL": 22.49, + "NQ-PL": 19.83, + "Quora-PL": 81.17, + "SCIDOCS-PL": 12.15, + "SciFact-PL": 49.49, + 
"TRECCOVID-PL": 38.97 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "st-polish-paraphrase-from-distilroberta", + "CDSC-R": 89.62, + "SICK-R-PL": 76.37, + "STS22 (pl)": 40.36 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "st-polish-paraphrase-from-distilroberta" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "st-polish-paraphrase-from-distilroberta" + } + ] + } + }, + "deberta-v1-base": { + "BitextMining": { + "f1": [ + { + "Model": "deberta-v1-base", + "Tatoeba (rus-Cyrl_eng-Latn)": 13.21 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "deberta-v1-base", + "GeoreviewClassification (rus-Cyrl)": 40.19, + "HeadlineClassification (rus-Cyrl)": 78.75, + "InappropriatenessClassification (rus-Cyrl)": 61.33, + "KinopoiskClassification (rus-Cyrl)": 48.78, + "MassiveIntentClassification (rus-Cyrl)": 61.32, + "MassiveScenarioClassification (rus-Cyrl)": 64.71, + "RuReviewsClassification (rus-Cyrl)": 55.66, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.53, + "RuSciBenchOECDClassification (rus-Cyrl)": 41.34 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "deberta-v1-base", + "GeoreviewClusteringP2P (rus-Cyrl)": 58.79, + "MLSUMClusteringP2P (rus-Cyrl)": 47.33, + "MLSUMClusteringS2S (rus-Cyrl)": 44.6, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 36.66, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 33.31 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "deberta-v1-base", + "OpusparcusPC (rus-Cyrl)": 83.31, + "TERRa (rus-Cyrl)": 53.78 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "deberta-v1-base", + "RuBQReranking (rus-Cyrl)": 34.01 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "deberta-v1-base", + "RiaNewsRetrieval (rus-Cyrl)": 4.84, + "RuBQRetrieval (rus-Cyrl)": 10.15 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "deberta-v1-base", + "RUParaPhraserSTS (rus-Cyrl)": 54.03, + "RuSTSBenchmarkSTS (rus-Cyrl)": 58.47, + "STS22 (rus-Cyrl)": 47.67, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 58.45 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "deberta-v1-base" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "deberta-v1-base" + } + ] + } + }, + "multilingual-e5-large": { + "BitextMining": { + "f1": [ + { + "Model": "multilingual-e5-large", + "BornholmBitextMining (dan-Latn)": 29.61, + "BornholmBitextMining": 44.16, + "Tatoeba (tgl-Latn_eng-Latn)": 92.0, + "Tatoeba (gsw-Latn_eng-Latn)": 51.65, + "Tatoeba (tzl-Latn_eng-Latn)": 53.16, + "Tatoeba (slv-Latn_eng-Latn)": 89.57, + "Tatoeba (jav-Latn_eng-Latn)": 75.46, + "Tatoeba (uig-Arab_eng-Latn)": 72.17, + "Tatoeba (ind-Latn_eng-Latn)": 92.9, + "Tatoeba (rus-Cyrl_eng-Latn)": 92.32, + "Tatoeba (war-Latn_eng-Latn)": 62.02, + "Tatoeba (mar-Deva_eng-Latn)": 88.58, + "Tatoeba (mkd-Cyrl_eng-Latn)": 85.63, + "Tatoeba (jpn-Jpan_eng-Latn)": 95.28, + "Tatoeba (hun-Latn_eng-Latn)": 94.01, + "Tatoeba (slk-Latn_eng-Latn)": 93.13, + "Tatoeba (tha-Thai_eng-Latn)": 95.38, + "Tatoeba (fra-Latn_eng-Latn)": 93.42, + "Tatoeba (ukr-Cyrl_eng-Latn)": 93.32, + "Tatoeba (kat-Geor_eng-Latn)": 84.09, + "Tatoeba (nov-Latn_eng-Latn)": 71.62, + "Tatoeba (kor-Hang_eng-Latn)": 90.65, + "Tatoeba (ben-Beng_eng-Latn)": 83.02, + "Tatoeba (cor-Latn_eng-Latn)": 6.28, + "Tatoeba (lfn-Latn_eng-Latn)": 62.91, + "Tatoeba (swh-Latn_eng-Latn)": 71.61, + "Tatoeba (tur-Latn_eng-Latn)": 96.27, + "Tatoeba (cbk-Latn_eng-Latn)": 69.26, + "Tatoeba (kur-Latn_eng-Latn)": 66.83, + "Tatoeba (arq-Arab_eng-Latn)": 41.56, + "Tatoeba (ceb-Latn_eng-Latn)": 
55.31, + "Tatoeba (max-Deva_eng-Latn)": 63.41, + "Tatoeba (ang-Latn_eng-Latn)": 40.18, + "Tatoeba (nds-Latn_eng-Latn)": 69.28, + "Tatoeba (epo-Latn_eng-Latn)": 96.01, + "Tatoeba (heb-Hebr_eng-Latn)": 86.61, + "Tatoeba (yue-Hant_eng-Latn)": 88.71, + "Tatoeba (dan-Latn_eng-Latn)": 95.08, + "Tatoeba (swe-Latn_eng-Latn)": 95.3, + "Tatoeba (lvs-Latn_eng-Latn)": 90.06, + "Tatoeba (ast-Latn_eng-Latn)": 81.76, + "Tatoeba (dsb-Latn_eng-Latn)": 48.44, + "Tatoeba (pes-Arab_eng-Latn)": 92.14, + "Tatoeba (dtp-Latn_eng-Latn)": 7.03, + "Tatoeba (tuk-Latn_eng-Latn)": 33.15, + "Tatoeba (isl-Latn_eng-Latn)": 92.09, + "Tatoeba (khm-Khmr_eng-Latn)": 59.96, + "Tatoeba (pam-Latn_eng-Latn)": 9.32, + "Tatoeba (tat-Cyrl_eng-Latn)": 73.51, + "Tatoeba (bos-Latn_eng-Latn)": 92.86, + "Tatoeba (spa-Latn_eng-Latn)": 97.1, + "Tatoeba (kaz-Cyrl_eng-Latn)": 79.67, + "Tatoeba (bel-Cyrl_eng-Latn)": 91.08, + "Tatoeba (zsm-Latn_eng-Latn)": 94.53, + "Tatoeba (cat-Latn_eng-Latn)": 91.03, + "Tatoeba (urd-Arab_eng-Latn)": 89.21, + "Tatoeba (mon-Cyrl_eng-Latn)": 87.53, + "Tatoeba (tam-Taml_eng-Latn)": 88.23, + "Tatoeba (fry-Latn_eng-Latn)": 63.43, + "Tatoeba (nob-Latn_eng-Latn)": 97.2, + "Tatoeba (tel-Telu_eng-Latn)": 91.34, + "Tatoeba (hye-Armn_eng-Latn)": 90.92, + "Tatoeba (awa-Deva_eng-Latn)": 72.27, + "Tatoeba (hrv-Latn_eng-Latn)": 96.15, + "Tatoeba (ile-Latn_eng-Latn)": 79.16, + "Tatoeba (amh-Ethi_eng-Latn)": 80.69, + "Tatoeba (orv-Cyrl_eng-Latn)": 39.87, + "Tatoeba (ara-Arab_eng-Latn)": 85.48, + "Tatoeba (ido-Latn_eng-Latn)": 83.52, + "Tatoeba (hin-Deva_eng-Latn)": 94.48, + "Tatoeba (por-Latn_eng-Latn)": 93.63, + "Tatoeba (ron-Latn_eng-Latn)": 94.87, + "Tatoeba (swg-Latn_eng-Latn)": 55.64, + "Tatoeba (cmn-Hans_eng-Latn)": 95.28, + "Tatoeba (pol-Latn_eng-Latn)": 96.6, + "Tatoeba (bul-Cyrl_eng-Latn)": 92.93, + "Tatoeba (ina-Latn_eng-Latn)": 93.47, + "Tatoeba (bre-Latn_eng-Latn)": 11.1, + "Tatoeba (wuu-Hans_eng-Latn)": 86.37, + "Tatoeba (lit-Latn_eng-Latn)": 88.48, + "Tatoeba (csb-Latn_eng-Latn)": 36.98, + "Tatoeba (lat-Latn_eng-Latn)": 53.37, + "Tatoeba (gle-Latn_eng-Latn)": 71.48, + "Tatoeba (ita-Latn_eng-Latn)": 93.29, + "Tatoeba (srp-Cyrl_eng-Latn)": 93.1, + "Tatoeba (arz-Arab_eng-Latn)": 74.73, + "Tatoeba (cym-Latn_eng-Latn)": 76.21, + "Tatoeba (ber-Tfng_eng-Latn)": 38.9, + "Tatoeba (xho-Latn_eng-Latn)": 80.87, + "Tatoeba (uzb-Latn_eng-Latn)": 72.35, + "Tatoeba (pms-Latn_eng-Latn)": 59.85, + "Tatoeba (est-Latn_eng-Latn)": 85.03, + "Tatoeba (deu-Latn_eng-Latn)": 99.07, + "Tatoeba (yid-Hebr_eng-Latn)": 76.33, + "Tatoeba (ell-Grek_eng-Latn)": 93.88, + "Tatoeba (afr-Latn_eng-Latn)": 90.22, + "Tatoeba (fao-Latn_eng-Latn)": 72.62, + "Tatoeba (nld-Latn_eng-Latn)": 96.63, + "Tatoeba (hsb-Latn_eng-Latn)": 58.9, + "Tatoeba (aze-Latn_eng-Latn)": 87.61, + "Tatoeba (kzj-Latn_eng-Latn)": 7.91, + "Tatoeba (kab-Latn_eng-Latn)": 36.54, + "Tatoeba (mal-Mlym_eng-Latn)": 97.7, + "Tatoeba (mhr-Cyrl_eng-Latn)": 6.79, + "Tatoeba (ces-Latn_eng-Latn)": 94.89, + "Tatoeba (gla-Latn_eng-Latn)": 59.0, + "Tatoeba (cha-Latn_eng-Latn)": 27.16, + "Tatoeba (glg-Latn_eng-Latn)": 93.34, + "Tatoeba (vie-Latn_eng-Latn)": 97.0, + "Tatoeba (oci-Latn_eng-Latn)": 54.91, + "Tatoeba (nno-Latn_eng-Latn)": 91.4, + "Tatoeba (fin-Latn_eng-Latn)": 95.44, + "Tatoeba (eus-Latn_eng-Latn)": 77.82, + "Tatoeba (sqi-Latn_eng-Latn)": 94.7 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "multilingual-e5-large", + "AllegroReviews (pol-Latn)": 41.04, + "AllegroReviews": 41.14, + "AmazonCounterfactualClassification (en-ext)": 78.73, + 
"AmazonCounterfactualClassification (en)": 78.67, + "AmazonCounterfactualClassification (deu-Latn)": 68.66, + "AmazonCounterfactualClassification (jpn-Jpan)": 78.8, + "AmazonPolarityClassification": 93.26, + "AmazonReviewsClassification (en)": 49.2, + "AmazonReviewsClassification (deu-Latn)": 46.5, + "AmazonReviewsClassification (spa-Latn)": 44.35, + "AmazonReviewsClassification (fra-Latn)": 42.55, + "AmazonReviewsClassification (jpn-Jpan)": 41.71, + "AmazonReviewsClassification (cmn-Hans)": 38.87, + "AmazonReviewsClassification (fr)": 41.91, + "AngryTweetsClassification (dan-Latn)": 57.69, + "AngryTweetsClassification": 54.95, + "Banking77Classification": 75.88, + "CBD (pol-Latn)": 69.84, + "CBD": 69.9, + "DKHateClassification": 66.02, + "DanishPoliticalCommentsClassification (dan-Latn)": 39.43, + "DanishPoliticalCommentsClassification": 38.27, + "EmotionClassification": 47.58, + "GeoreviewClassification (rus-Cyrl)": 49.69, + "HeadlineClassification (rus-Cyrl)": 77.19, + "IFlyTek (cmn-Hans)": 41.86, + "IFlyTek": 45.47, + "ImdbClassification": 90.23, + "InappropriatenessClassification (rus-Cyrl)": 61.6, + "JDReview (cmn-Hans)": 80.54, + "JDReview": 80.99, + "KinopoiskClassification (rus-Cyrl)": 56.59, + "LccSentimentClassification (dan-Latn)": 61.53, + "LccSentimentClassification": 59.6, + "MTOPDomainClassification (en)": 91.81, + "MTOPDomainClassification (deu-Latn)": 90.44, + "MTOPDomainClassification (spa-Latn)": 88.34, + "MTOPDomainClassification (fra-Latn)": 86.23, + "MTOPDomainClassification (hin-Deva)": 86.84, + "MTOPDomainClassification (tha-Thai)": 86.88, + "MTOPDomainClassification (fr)": 86.41, + "MTOPIntentClassification (en)": 64.29, + "MTOPIntentClassification (deu-Latn)": 65.97, + "MTOPIntentClassification (spa-Latn)": 61.9, + "MTOPIntentClassification (fra-Latn)": 56.25, + "MTOPIntentClassification (hin-Deva)": 59.17, + "MTOPIntentClassification (tha-Thai)": 62.59, + "MTOPIntentClassification (fr)": 59.43, + "MasakhaNEWSClassification (amh-Ethi)": 83.7, + "MasakhaNEWSClassification (eng)": 78.26, + "MasakhaNEWSClassification (fra-Latn)": 76.11, + "MasakhaNEWSClassification (hau-Latn)": 76.17, + "MasakhaNEWSClassification (ibo-Latn)": 70.05, + "MasakhaNEWSClassification (lin-Latn)": 75.89, + "MasakhaNEWSClassification (lug-Latn)": 73.63, + "MasakhaNEWSClassification (orm-Ethi)": 80.31, + "MasakhaNEWSClassification (pcm-Latn)": 89.15, + "MasakhaNEWSClassification (run-Latn)": 76.55, + "MasakhaNEWSClassification (sna-Latn)": 86.99, + "MasakhaNEWSClassification (som-Latn)": 64.63, + "MasakhaNEWSClassification (swa-Latn)": 73.42, + "MasakhaNEWSClassification (tir-Ethi)": 72.06, + "MasakhaNEWSClassification (xho-Latn)": 82.56, + "MasakhaNEWSClassification (yor-Latn)": 81.09, + "MasakhaNEWSClassification (fra)": 79.38, + "MassiveIntentClassification (kor-Kore)": 63.92, + "MassiveIntentClassification (lav-Latn)": 58.31, + "MassiveIntentClassification (isl-Latn)": 53.3, + "MassiveIntentClassification (tel-Telu)": 53.96, + "MassiveIntentClassification (mya-Mymr)": 49.73, + "MassiveIntentClassification (nob-Latn)": 64.54, + "MassiveIntentClassification (en)": 68.51, + "MassiveIntentClassification (spa-Latn)": 64.01, + "MassiveIntentClassification (swe-Latn)": 66.52, + "MassiveIntentClassification (cmo-Hant)": 58.78, + "MassiveIntentClassification (pol-Latn)": 65.09, + "MassiveIntentClassification (rus-Cyrl)": 65.76, + "MassiveIntentClassification (aze-Latn)": 54.68, + "MassiveIntentClassification (fin-Latn)": 64.28, + "MassiveIntentClassification (cmo-Hans)": 66.23, + 
"MassiveIntentClassification (urd-Arab)": 54.6, + "MassiveIntentClassification (tam-Taml)": 53.41, + "MassiveIntentClassification (hin-Deva)": 60.93, + "MassiveIntentClassification (deu-Latn)": 63.82, + "MassiveIntentClassification (ell-Grek)": 64.34, + "MassiveIntentClassification (hye-Armn)": 50.89, + "MassiveIntentClassification (por-Latn)": 65.6, + "MassiveIntentClassification (nld-Latn)": 65.0, + "MassiveIntentClassification (fas-Arab)": 63.74, + "MassiveIntentClassification (ron-Latn)": 59.76, + "MassiveIntentClassification (slv-Latn)": 59.38, + "MassiveIntentClassification (heb-Hebr)": 62.44, + "MassiveIntentClassification (vie-Latn)": 63.39, + "MassiveIntentClassification (sqi-Latn)": 57.3, + "MassiveIntentClassification (khm-Khmr)": 34.88, + "MassiveIntentClassification (ben-Beng)": 55.6, + "MassiveIntentClassification (tgl-Latn)": 54.77, + "MassiveIntentClassification (jpn-Jpan)": 67.11, + "MassiveIntentClassification (kat-Geor)": 41.45, + "MassiveIntentClassification (afr-Latn)": 53.69, + "MassiveIntentClassification (cym-Latn)": 44.22, + "MassiveIntentClassification (amh-Ethi)": 45.48, + "MassiveIntentClassification (ita-Latn)": 63.89, + "MassiveIntentClassification (mal-Mlym)": 57.58, + "MassiveIntentClassification (tha-Thai)": 62.75, + "MassiveIntentClassification (ind-Latn)": 63.51, + "MassiveIntentClassification (jav-Latn)": 48.96, + "MassiveIntentClassification (dan-Latn)": 63.7, + "MassiveIntentClassification (ara-Arab)": 54.1, + "MassiveIntentClassification (kan-Knda)": 53.45, + "MassiveIntentClassification (hun-Latn)": 64.0, + "MassiveIntentClassification (tur-Latn)": 64.61, + "MassiveIntentClassification (msa-Latn)": 58.49, + "MassiveIntentClassification (mon-Cyrl)": 49.6, + "MassiveIntentClassification (swa-Latn)": 47.69, + "MassiveIntentClassification (fra-Latn)": 63.37, + "MassiveIntentClassification (da)": 60.16, + "MassiveIntentClassification (nb)": 59.83, + "MassiveIntentClassification (sv)": 61.78, + "MassiveIntentClassification (pl)": 65.07, + "MassiveScenarioClassification (heb-Hebr)": 67.72, + "MassiveScenarioClassification (vie-Latn)": 68.91, + "MassiveScenarioClassification (cmo-Hant)": 64.35, + "MassiveScenarioClassification (urd-Arab)": 60.89, + "MassiveScenarioClassification (isl-Latn)": 60.74, + "MassiveScenarioClassification (ell-Grek)": 69.74, + "MassiveScenarioClassification (mon-Cyrl)": 55.37, + "MassiveScenarioClassification (swa-Latn)": 56.27, + "MassiveScenarioClassification (tam-Taml)": 58.76, + "MassiveScenarioClassification (hye-Armn)": 55.76, + "MassiveScenarioClassification (amh-Ethi)": 52.69, + "MassiveScenarioClassification (ben-Beng)": 61.85, + "MassiveScenarioClassification (tel-Telu)": 59.49, + "MassiveScenarioClassification (dan-Latn)": 71.18, + "MassiveScenarioClassification (slv-Latn)": 65.33, + "MassiveScenarioClassification (en)": 73.04, + "MassiveScenarioClassification (rus-Cyrl)": 70.85, + "MassiveScenarioClassification (mal-Mlym)": 63.17, + "MassiveScenarioClassification (sqi-Latn)": 63.79, + "MassiveScenarioClassification (ita-Latn)": 69.45, + "MassiveScenarioClassification (kor-Kore)": 70.54, + "MassiveScenarioClassification (cmo-Hans)": 72.25, + "MassiveScenarioClassification (cym-Latn)": 51.25, + "MassiveScenarioClassification (pol-Latn)": 69.83, + "MassiveScenarioClassification (ind-Latn)": 69.43, + "MassiveScenarioClassification (tur-Latn)": 68.12, + "MassiveScenarioClassification (tgl-Latn)": 60.71, + "MassiveScenarioClassification (hin-Deva)": 66.85, + "MassiveScenarioClassification (spa-Latn)": 69.07, + 
"MassiveScenarioClassification (lav-Latn)": 64.28, + "MassiveScenarioClassification (mya-Mymr)": 54.03, + "MassiveScenarioClassification (ara-Arab)": 61.0, + "MassiveScenarioClassification (kan-Knda)": 59.36, + "MassiveScenarioClassification (jav-Latn)": 56.24, + "MassiveScenarioClassification (por-Latn)": 68.33, + "MassiveScenarioClassification (tha-Thai)": 69.06, + "MassiveScenarioClassification (aze-Latn)": 58.49, + "MassiveScenarioClassification (fra-Latn)": 68.74, + "MassiveScenarioClassification (ron-Latn)": 66.06, + "MassiveScenarioClassification (nld-Latn)": 71.11, + "MassiveScenarioClassification (fas-Arab)": 67.55, + "MassiveScenarioClassification (deu-Latn)": 71.25, + "MassiveScenarioClassification (nob-Latn)": 70.44, + "MassiveScenarioClassification (msa-Latn)": 63.55, + "MassiveScenarioClassification (afr-Latn)": 62.35, + "MassiveScenarioClassification (hun-Latn)": 70.53, + "MassiveScenarioClassification (swe-Latn)": 72.77, + "MassiveScenarioClassification (kat-Geor)": 47.82, + "MassiveScenarioClassification (jpn-Jpan)": 73.16, + "MassiveScenarioClassification (khm-Khmr)": 41.14, + "MassiveScenarioClassification (fin-Latn)": 68.62, + "MassiveScenarioClassification (da)": 67.46, + "MassiveScenarioClassification (nb)": 66.18, + "MassiveScenarioClassification (sv)": 69.15, + "MassiveScenarioClassification (pl)": 69.82, + "MultilingualSentiment (cmn-Hans)": 70.81, + "MultilingualSentiment": 68.58, + "NoRecClassification (nob-Latn)": 58.43, + "NoRecClassification": 62.76, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 80.15, + "NordicLangClassification": 82.29, + "NorwegianParliament": 60.36, + "OnlineShopping (cmn-Hans)": 90.45, + "OnlineShopping": 90.81, + "PAC (pol-Latn)": 70.33, + "PAC": 70.37, + "PolEmo2.0-IN (pol-Latn)": 77.06, + "PolEmo2.0-IN": 77.06, + "PolEmo2.0-OUT (pol-Latn)": 53.48, + "PolEmo2.0-OUT": 53.38, + "RuReviewsClassification (rus-Cyrl)": 65.28, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.2, + "RuSciBenchOECDClassification (rus-Cyrl)": 43.91, + "ScalaDaClassification": 50.77, + "ScalaNbClassification": 50.44, + "TNews (cmn-Hans)": 48.8, + "TNews": 48.38, + "ToxicConversationsClassification": 66.01, + "TweetSentimentExtractionClassification": 62.8, + "Waimai (cmn-Hans)": 86.3, + "Waimai": 85.02 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "multilingual-e5-large", + "8TagsClustering": 33.88, + "AlloProfClusteringP2P": 62.99, + "AlloProfClusteringS2S": 32.26, + "BiorxivClusteringP2P": 35.5, + "BiorxivClusteringS2S": 33.3, + "CLSClusteringP2P": 40.68, + "CLSClusteringS2S": 38.59, + "GeoreviewClusteringP2P (rus-Cyrl)": 60.51, + "HALClusteringS2S": 22.44, + "MLSUMClusteringP2P (rus-Cyrl)": 42.79, + "MLSUMClusteringP2P": 44.04, + "MLSUMClusteringS2S (rus-Cyrl)": 44.32, + "MLSUMClusteringS2S": 37.65, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.16, + "MasakhaNEWSClusteringP2P (eng)": 61.1, + "MasakhaNEWSClusteringP2P (fra-Latn)": 41.66, + "MasakhaNEWSClusteringP2P (hau-Latn)": 60.7, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 48.41, + "MasakhaNEWSClusteringP2P (lin-Latn)": 57.69, + "MasakhaNEWSClusteringP2P (lug-Latn)": 71.95, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 60.14, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 80.84, + "MasakhaNEWSClusteringP2P (run-Latn)": 59.91, + "MasakhaNEWSClusteringP2P (sna-Latn)": 53.3, + "MasakhaNEWSClusteringP2P (som-Latn)": 34.38, + "MasakhaNEWSClusteringP2P (swa-Latn)": 33.25, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 54.21, + "MasakhaNEWSClusteringP2P (xho-Latn)": 41.12, + 
"MasakhaNEWSClusteringP2P (yor-Latn)": 36.22, + "MasakhaNEWSClusteringP2P (fra)": 40.94, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 47.24, + "MasakhaNEWSClusteringS2S (eng)": 53.93, + "MasakhaNEWSClusteringS2S (fra-Latn)": 39.84, + "MasakhaNEWSClusteringS2S (hau-Latn)": 19.24, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 28.88, + "MasakhaNEWSClusteringS2S (lin-Latn)": 42.22, + "MasakhaNEWSClusteringS2S (lug-Latn)": 43.63, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.29, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 59.77, + "MasakhaNEWSClusteringS2S (run-Latn)": 51.46, + "MasakhaNEWSClusteringS2S (sna-Latn)": 48.14, + "MasakhaNEWSClusteringS2S (som-Latn)": 25.14, + "MasakhaNEWSClusteringS2S (swa-Latn)": 7.28, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51, + "MasakhaNEWSClusteringS2S (xho-Latn)": 30.98, + "MasakhaNEWSClusteringS2S (yor-Latn)": 34.09, + "MasakhaNEWSClusteringS2S (fra)": 30.56, + "MedrxivClusteringP2P": 31.7, + "MedrxivClusteringS2S": 29.76, + "RedditClustering": 46.91, + "RedditClusteringP2P": 63.0, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.03, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.11, + "StackExchangeClustering": 58.37, + "StackExchangeClusteringP2P": 32.9, + "ThuNewsClusteringP2P": 58.05, + "ThuNewsClusteringS2S": 55.59, + "TwentyNewsgroupsClustering": 39.4 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "multilingual-e5-large", + "CDSC-E (pol-Latn)": 74.47, + "CDSC-E": 74.47, + "Cmnli": 78.18, + "Ocnli": 61.6, + "OpusparcusPC (deu-Latn)": 97.27, + "OpusparcusPC (en)": 98.74, + "OpusparcusPC (fin-Latn)": 94.26, + "OpusparcusPC (fra-Latn)": 93.68, + "OpusparcusPC (rus-Cyrl)": 89.64, + "OpusparcusPC (swe-Latn)": 94.98, + "OpusparcusPC (fr)": 93.89, + "PPC": 92.18, + "PSC (pol-Latn)": 99.4, + "PSC": 99.39, + "PawsXPairClassification (deu-Latn)": 56.81, + "PawsXPairClassification (en)": 62.97, + "PawsXPairClassification (spa-Latn)": 56.85, + "PawsXPairClassification (fra-Latn)": 58.68, + "PawsXPairClassification (jpn-Hira)": 50.7, + "PawsXPairClassification (kor-Hang)": 52.08, + "PawsXPairClassification (cmn-Hans)": 56.82, + "PawsXPairClassification (fr)": 58.5, + "SICK-E-PL (pol-Latn)": 75.95, + "SICK-E-PL": 75.96, + "SprintDuplicateQuestions": 93.14, + "TERRa (rus-Cyrl)": 58.4, + "TwitterSemEval2015": 75.28, + "TwitterURLCorpus": 85.83 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "multilingual-e5-large", + "AlloprofReranking (fra-Latn)": 69.44, + "AlloprofReranking": 57.37, + "AskUbuntuDupQuestions": 59.24, + "CMedQAv1": 68.25, + "CMedQAv2": 68.56, + "MMarcoReranking (cmn-Hans)": 29.12, + "MMarcoReranking": 21.34, + "MindSmallReranking": 30.24, + "RuBQReranking (rus-Cyrl)": 75.58, + "SciDocsRR": 84.22, + "StackOverflowDupQuestions": 50.14, + "SyntecReranking (fra-Latn)": 85.45, + "SyntecReranking": 86.9, + "T2Reranking (cmn-Hans)": 66.32, + "T2Reranking": 65.83 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "multilingual-e5-large", + "AILACasedocs": 26.43, + "AILAStatutes": 20.84, + "ARCChallenge": 10.83, + "AlloprofRetrieval (fra-Latn)": 39.34, + "AlloprofRetrieval": 38.15, + "AlphaNLI": 13.59, + "ArguAna": 54.36, + "ArguAna-PL (pol-Latn)": 52.99, + "ArguAna-PL": 53.02, + "BSARDRetrieval (fra-Latn)": 21.28, + "BSARDRetrieval": 0.27, + "CmedqaRetrieval (cmn-Hans)": 28.66, + "CmedqaRetrieval": 28.67, + "CovidRetrieval (cmn-Hans)": 75.61, + "CovidRetrieval": 75.51, + "DBPedia-PL": 35.82, + "DuRetrieval (cmn-Hans)": 85.3, + "DuRetrieval": 85.32, + "EcomRetrieval (cmn-Hans)": 54.67, + "EcomRetrieval": 54.75, + "FiQA-PL (pol-Latn)": 
32.97, + "FiQA-PL": 33.0, + "FiQA2018": 43.81, + "GerDaLIRSmall (deu-Latn)": 15.72, + "HellaSwag": 27.35, + "HotpotQA-PL": 67.41, + "LEMBNarrativeQARetrieval": 24.22, + "LEMBNeedleRetrieval": 28.0, + "LEMBPasskeyRetrieval": 38.25, + "LEMBQMSumRetrieval": 24.26, + "LEMBSummScreenFDRetrieval": 71.12, + "LEMBWikimQARetrieval": 56.8, + "LeCaRDv2 (zho-Hans)": 55.83, + "LegalBenchConsumerContractsQA": 73.3, + "LegalBenchCorporateLobbying": 89.72, + "LegalQuAD (deu-Latn)": 43.17, + "LegalSummarization": 62.1, + "MMarcoRetrieval (cmn-Hans)": 79.2, + "MMarcoRetrieval": 79.2, + "MSMARCO-PL": 33.38, + "MedicalRetrieval (cmn-Hans)": 51.44, + "MedicalRetrieval": 51.44, + "MintakaRetrieval (ara-Arab)": 26.5, + "MintakaRetrieval (deu-Latn)": 32.77, + "MintakaRetrieval (spa-Latn)": 34.23, + "MintakaRetrieval (fra-Latn)": 34.24, + "MintakaRetrieval (hin-Deva)": 27.45, + "MintakaRetrieval (ita-Latn)": 33.84, + "MintakaRetrieval (jpn-Hira)": 26.45, + "MintakaRetrieval (por-Latn)": 35.9, + "MintakaRetrieval (fr)": 25.2, + "NFCorpus": 33.95, + "NFCorpus-PL (pol-Latn)": 30.21, + "NFCorpus-PL": 30.24, + "NQ-PL": 52.79, + "PIQA": 28.82, + "Quail": 4.85, + "Quora-PL": 83.65, + "RARbCode": 58.92, + "RARbMath": 67.32, + "RiaNewsRetrieval (rus-Cyrl)": 80.67, + "RuBQRetrieval (rus-Cyrl)": 74.11, + "SCIDOCS": 17.45, + "SCIDOCS-PL (pol-Latn)": 13.82, + "SCIDOCS-PL": 13.81, + "SIQA": 5.36, + "SciFact": 70.42, + "SciFact-PL (pol-Latn)": 65.66, + "SciFact-PL": 65.66, + "SpartQA": 5.64, + "SyntecRetrieval (fra-Latn)": 82.39, + "SyntecRetrieval": 81.07, + "T2Retrieval (cmn-Hans)": 76.07, + "T2Retrieval": 76.11, + "TRECCOVID": 71.21, + "TRECCOVID-PL (pol-Latn)": 69.9, + "TRECCOVID-PL": 70.03, + "TempReasonL1": 1.14, + "TempReasonL2Fact": 42.97, + "TempReasonL2Pure": 2.05, + "TempReasonL3Fact": 38.22, + "TempReasonL3Pure": 8.31, + "Touche2020": 23.13, + "VideoRetrieval (cmn-Hans)": 58.28, + "VideoRetrieval": 58.25, + "WinoGrande": 54.99, + "XPQARetrieval (ara-Arab_ara-Arab)": 43.69, + "XPQARetrieval (eng-Latn_ara-Arab)": 30.86, + "XPQARetrieval (ara-Arab_eng-Latn)": 39.11, + "XPQARetrieval (deu-Latn_deu-Latn)": 76.83, + "XPQARetrieval (eng-Latn_deu-Latn)": 42.87, + "XPQARetrieval (deu-Latn_eng-Latn)": 68.25, + "XPQARetrieval (spa-Latn_spa-Latn)": 61.77, + "XPQARetrieval (eng-Latn_spa-Latn)": 37.55, + "XPQARetrieval (spa-Latn_eng-Latn)": 52.86, + "XPQARetrieval (fra-Latn_fra-Latn)": 61.38, + "XPQARetrieval (eng-Latn_fra-Latn)": 39.12, + "XPQARetrieval (fra-Latn_eng-Latn)": 57.93, + "XPQARetrieval (hin-Deva_hin-Deva)": 71.09, + "XPQARetrieval (eng-Latn_hin-Deva)": 32.39, + "XPQARetrieval (hin-Deva_eng-Latn)": 68.31, + "XPQARetrieval (ita-Latn_ita-Latn)": 74.32, + "XPQARetrieval (eng-Latn_ita-Latn)": 37.95, + "XPQARetrieval (ita-Latn_eng-Latn)": 64.54, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 74.11, + "XPQARetrieval (eng-Latn_jpn-Hira)": 38.31, + "XPQARetrieval (jpn-Hira_eng-Latn)": 65.42, + "XPQARetrieval (kor-Hang_kor-Hang)": 35.72, + "XPQARetrieval (eng-Latn_kor-Hang)": 31.09, + "XPQARetrieval (kor-Hang_eng-Latn)": 34.06, + "XPQARetrieval (pol-Latn_pol-Latn)": 51.01, + "XPQARetrieval (eng-Latn_pol-Latn)": 30.49, + "XPQARetrieval (pol-Latn_eng-Latn)": 44.66, + "XPQARetrieval (por-Latn_por-Latn)": 41.1, + "XPQARetrieval (eng-Latn_por-Latn)": 22.03, + "XPQARetrieval (por-Latn_eng-Latn)": 35.15, + "XPQARetrieval (tam-Taml_tam-Taml)": 39.51, + "XPQARetrieval (eng-Latn_tam-Taml)": 17.33, + "XPQARetrieval (tam-Taml_eng-Latn)": 33.67, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 66.27, + "XPQARetrieval (eng-Latn_cmn-Hans)": 26.24, + 
"XPQARetrieval (cmn-Hans_eng-Latn)": 55.15, + "XPQARetrieval (fr)": 66.15 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "multilingual-e5-large", + "AFQMC (cmn-Hans)": 33.01, + "AFQMC": 33.02, + "ATEC (cmn-Hans)": 39.8, + "ATEC": 39.81, + "BIOSSES": 82.49, + "BQ (cmn-Hans)": 46.44, + "BQ": 46.44, + "CDSC-R (pol-Latn)": 91.0, + "CDSC-R": 91.0, + "LCQMC (cmn-Hans)": 75.95, + "LCQMC": 75.95, + "PAWSX (cmn-Hans)": 14.63, + "PAWSX": 14.63, + "QBQTC": 29.77, + "RUParaPhraserSTS (rus-Cyrl)": 71.82, + "RuSTSBenchmarkSTS (rus-Cyrl)": 83.15, + "SICK-R": 80.23, + "SICK-R-PL (pol-Latn)": 75.08, + "SICK-R-PL": 75.08, + "SICKFr (fra-Latn)": 78.81, + "SICKFr": 78.78, + "STS12": 80.02, + "STS13": 81.55, + "STS14": 77.72, + "STS15": 89.31, + "STS16": 85.79, + "STS17 (en-en)": 88.12, + "STS17 (spa-Latn)": 86.71, + "STS17 (spa-Latn_eng-Latn)": 80.74, + "STS17 (eng-Latn_ara-Arab)": 75.03, + "STS17 (fra-Latn_eng-Latn)": 85.62, + "STS17 (kor-Hang)": 82.27, + "STS17 (ita-Latn_eng-Latn)": 84.52, + "STS17 (ara-Arab)": 77.83, + "STS17 (eng-Latn_tur-Latn)": 71.22, + "STS17 (eng-Latn_deu-Latn)": 86.15, + "STS17 (nld-Latn_eng-Latn)": 85.29, + "STS22 (spa-Latn)": 64.6, + "STS22 (spa-Latn_eng-Latn)": 72.51, + "STS22 (deu-Latn_eng-Latn)": 56.59, + "STS22 (cmn-Hans_eng-Latn)": 65.95, + "STS22 (deu-Latn_pol-Latn)": 49.58, + "STS22 (fra-Latn_pol-Latn)": 50.71, + "STS22 (en)": 63.66, + "STS22 (ara-Arab)": 56.95, + "STS22 (spa-Latn_ita-Latn)": 68.92, + "STS22 (tur-Latn)": 63.56, + "STS22 (deu-Latn_fra-Latn)": 67.96, + "STS22 (ita-Latn)": 76.99, + "STS22 (cmn-Hans)": 66.82, + "STS22 (rus-Cyrl)": 59.89, + "STS22 (fra-Latn)": 76.77, + "STS22 (pol-Latn_eng-Latn)": 65.54, + "STS22 (deu-Latn)": 56.58, + "STS22 (pol-Latn)": 34.65, + "STS22 (zh)": 65.64, + "STS22 (pl)": 34.66, + "STSB (cmn-Hans)": 81.08, + "STSB": 81.08, + "STSBenchmark": 87.29, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.22, + "STSBenchmarkMultilingualSTS (en)": 87.29, + "STSBenchmarkMultilingualSTS (pol-Latn)": 81.06, + "STSBenchmarkMultilingualSTS (nld-Latn)": 81.63, + "STSBenchmarkMultilingualSTS (ita-Latn)": 81.75, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 83.05, + "STSBenchmarkMultilingualSTS (por-Latn)": 73.31, + "STSBenchmarkMultilingualSTS (spa-Latn)": 83.81, + "STSBenchmarkMultilingualSTS (fra-Latn)": 83.28, + "STSBenchmarkMultilingualSTS (deu-Latn)": 84.27, + "STSBenchmarkMultilingualSTS (fr)": 82.53 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "multilingual-e5-large", + "SummEval": 29.65, + "SummEvalFr (fra-Latn)": 30.92, + "SummEvalFr": 30.92 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "multilingual-e5-large" + } + ] + } + }, + "rubert-tiny-turbo": { + "BitextMining": { + "f1": [ + { + "Model": "rubert-tiny-turbo", + "Tatoeba (rus-Cyrl_eng-Latn)": 83.14 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "rubert-tiny-turbo", + "AmazonPolarityClassification": 68.36, + "Banking77Classification": 59.86, + "EmotionClassification": 29.5, + "GeoreviewClassification (rus-Cyrl)": 41.36, + "HeadlineClassification (rus-Cyrl)": 68.9, + "ImdbClassification": 58.36, + "InappropriatenessClassification (rus-Cyrl)": 59.11, + "KinopoiskClassification (rus-Cyrl)": 50.47, + "MassiveIntentClassification (cmo-Hans)": 5.21, + "MassiveIntentClassification (kor-Kore)": 2.53, + "MassiveIntentClassification (hin-Deva)": 2.56, + "MassiveIntentClassification (kan-Knda)": 2.06, + "MassiveIntentClassification (kat-Geor)": 2.64, + "MassiveIntentClassification (amh-Ethi)": 2.28, + "MassiveIntentClassification 
(mya-Mymr)": 3.96, + "MassiveIntentClassification (ell-Grek)": 9.66, + "MassiveIntentClassification (lav-Latn)": 22.32, + "MassiveIntentClassification (mal-Mlym)": 2.39, + "MassiveIntentClassification (mon-Cyrl)": 28.99, + "MassiveIntentClassification (urd-Arab)": 2.45, + "MassiveIntentClassification (fas-Arab)": 3.34, + "MassiveIntentClassification (ron-Latn)": 31.72, + "MassiveIntentClassification (isl-Latn)": 24.85, + "MassiveIntentClassification (en)": 50.16, + "MassiveIntentClassification (hun-Latn)": 25.52, + "MassiveIntentClassification (fra-Latn)": 31.51, + "MassiveIntentClassification (tha-Thai)": 3.74, + "MassiveIntentClassification (deu-Latn)": 32.1, + "MassiveIntentClassification (tur-Latn)": 27.56, + "MassiveIntentClassification (por-Latn)": 34.35, + "MassiveIntentClassification (sqi-Latn)": 32.38, + "MassiveIntentClassification (cmo-Hant)": 6.81, + "MassiveIntentClassification (hye-Armn)": 2.72, + "MassiveIntentClassification (dan-Latn)": 33.95, + "MassiveIntentClassification (afr-Latn)": 30.4, + "MassiveIntentClassification (ara-Arab)": 3.8, + "MassiveIntentClassification (jav-Latn)": 28.53, + "MassiveIntentClassification (tel-Telu)": 2.21, + "MassiveIntentClassification (tgl-Latn)": 32.02, + "MassiveIntentClassification (swa-Latn)": 27.79, + "MassiveIntentClassification (jpn-Jpan)": 5.61, + "MassiveIntentClassification (msa-Latn)": 28.94, + "MassiveIntentClassification (nob-Latn)": 32.3, + "MassiveIntentClassification (fin-Latn)": 31.13, + "MassiveIntentClassification (ind-Latn)": 33.56, + "MassiveIntentClassification (cym-Latn)": 31.68, + "MassiveIntentClassification (slv-Latn)": 31.39, + "MassiveIntentClassification (spa-Latn)": 31.03, + "MassiveIntentClassification (ben-Beng)": 3.08, + "MassiveIntentClassification (swe-Latn)": 30.23, + "MassiveIntentClassification (rus-Cyrl)": 57.98, + "MassiveIntentClassification (aze-Latn)": 23.58, + "MassiveIntentClassification (ita-Latn)": 35.24, + "MassiveIntentClassification (pol-Latn)": 26.82, + "MassiveIntentClassification (vie-Latn)": 23.72, + "MassiveIntentClassification (tam-Taml)": 1.5, + "MassiveIntentClassification (heb-Hebr)": 2.25, + "MassiveIntentClassification (nld-Latn)": 32.44, + "MassiveIntentClassification (khm-Khmr)": 5.14, + "MassiveScenarioClassification (cmo-Hans)": 10.6, + "MassiveScenarioClassification (kor-Kore)": 5.63, + "MassiveScenarioClassification (hin-Deva)": 7.41, + "MassiveScenarioClassification (kan-Knda)": 7.6, + "MassiveScenarioClassification (kat-Geor)": 7.01, + "MassiveScenarioClassification (amh-Ethi)": 7.68, + "MassiveScenarioClassification (mya-Mymr)": 10.73, + "MassiveScenarioClassification (ell-Grek)": 17.95, + "MassiveScenarioClassification (lav-Latn)": 29.29, + "MassiveScenarioClassification (mal-Mlym)": 6.92, + "MassiveScenarioClassification (mon-Cyrl)": 33.7, + "MassiveScenarioClassification (urd-Arab)": 8.53, + "MassiveScenarioClassification (fas-Arab)": 6.62, + "MassiveScenarioClassification (ron-Latn)": 40.02, + "MassiveScenarioClassification (isl-Latn)": 33.1, + "MassiveScenarioClassification (en)": 61.29, + "MassiveScenarioClassification (hun-Latn)": 36.41, + "MassiveScenarioClassification (fra-Latn)": 42.9, + "MassiveScenarioClassification (tha-Thai)": 8.26, + "MassiveScenarioClassification (deu-Latn)": 42.07, + "MassiveScenarioClassification (tur-Latn)": 34.85, + "MassiveScenarioClassification (por-Latn)": 40.79, + "MassiveScenarioClassification (sqi-Latn)": 42.66, + "MassiveScenarioClassification (cmo-Hant)": 11.93, + "MassiveScenarioClassification (hye-Armn)": 8.78, + 
"MassiveScenarioClassification (dan-Latn)": 43.69, + "MassiveScenarioClassification (afr-Latn)": 40.84, + "MassiveScenarioClassification (ara-Arab)": 11.86, + "MassiveScenarioClassification (jav-Latn)": 37.23, + "MassiveScenarioClassification (tel-Telu)": 6.91, + "MassiveScenarioClassification (tgl-Latn)": 38.16, + "MassiveScenarioClassification (swa-Latn)": 35.66, + "MassiveScenarioClassification (jpn-Jpan)": 10.6, + "MassiveScenarioClassification (msa-Latn)": 38.97, + "MassiveScenarioClassification (nob-Latn)": 39.05, + "MassiveScenarioClassification (fin-Latn)": 35.19, + "MassiveScenarioClassification (ind-Latn)": 39.54, + "MassiveScenarioClassification (cym-Latn)": 39.85, + "MassiveScenarioClassification (slv-Latn)": 35.98, + "MassiveScenarioClassification (spa-Latn)": 37.13, + "MassiveScenarioClassification (ben-Beng)": 8.85, + "MassiveScenarioClassification (swe-Latn)": 36.12, + "MassiveScenarioClassification (rus-Cyrl)": 62.9, + "MassiveScenarioClassification (aze-Latn)": 30.32, + "MassiveScenarioClassification (ita-Latn)": 42.69, + "MassiveScenarioClassification (pol-Latn)": 31.62, + "MassiveScenarioClassification (vie-Latn)": 31.89, + "MassiveScenarioClassification (tam-Taml)": 7.01, + "MassiveScenarioClassification (heb-Hebr)": 7.61, + "MassiveScenarioClassification (nld-Latn)": 40.94, + "MassiveScenarioClassification (khm-Khmr)": 8.51, + "RuReviewsClassification (rus-Cyrl)": 60.66, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.93, + "RuSciBenchOECDClassification (rus-Cyrl)": 40.79, + "ToxicConversationsClassification": 57.77, + "TweetSentimentExtractionClassification": 55.3 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "rubert-tiny-turbo", + "ArxivClusteringP2P": 24.83, + "ArxivClusteringS2S": 16.68, + "BiorxivClusteringP2P": 20.0, + "BiorxivClusteringS2S": 12.67, + "GeoreviewClusteringP2P (rus-Cyrl)": 59.71, + "MLSUMClusteringP2P (rus-Cyrl)": 40.02, + "MLSUMClusteringS2S (rus-Cyrl)": 41.36, + "MedrxivClusteringP2P": 20.79, + "MedrxivClusteringS2S": 18.18, + "RedditClustering": 26.28, + "RedditClusteringP2P": 40.48, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.55, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.44, + "StackExchangeClustering": 33.51, + "StackExchangeClusteringP2P": 27.98, + "TwentyNewsgroupsClustering": 19.9 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "rubert-tiny-turbo", + "OpusparcusPC (rus-Cyrl)": 87.58, + "TERRa (rus-Cyrl)": 56.09 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "rubert-tiny-turbo", + "RuBQReranking (rus-Cyrl)": 62.15 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "rubert-tiny-turbo", + "AILACasedocs": 7.43, + "AILAStatutes": 13.62, + "ARCChallenge": 3.85, + "AlphaNLI": 14.15, + "ArguAna": 32.03, + "ClimateFEVER": 5.56, + "DBPedia": 9.61, + "RiaNewsRetrieval (rus-Cyrl)": 51.27, + "RuBQRetrieval (rus-Cyrl)": 51.73 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "rubert-tiny-turbo", + "RUParaPhraserSTS (rus-Cyrl)": 72.15, + "RuSTSBenchmarkSTS (rus-Cyrl)": 78.48, + "STS22 (cmn-Hans)": 32.83, + "STS22 (deu-Latn_fra-Latn)": 17.5, + "STS22 (pol-Latn_eng-Latn)": 42.08, + "STS22 (rus-Cyrl)": 60.06, + "STS22 (fra-Latn)": 42.0, + "STS22 (deu-Latn)": 8.16, + "STS22 (tur-Latn)": 15.46, + "STS22 (deu-Latn_eng-Latn)": 21.55, + "STS22 (ita-Latn)": 39.69, + "STS22 (pol-Latn)": 9.71, + "STS22 (fra-Latn_pol-Latn)": 39.44, + "STS22 (deu-Latn_pol-Latn)": 25.53, + "STS22 (ara-Arab)": 27.95, + "STS22 (spa-Latn_eng-Latn)": 42.77, + "STS22 (spa-Latn_ita-Latn)": 32.83, + "STS22 (spa-Latn)": 
45.31, + "STS22 (cmn-Hans_eng-Latn)": 31.25, + "STS22 (en)": 47.06, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.12 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "rubert-tiny-turbo" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "rubert-tiny-turbo" + } + ] + } + }, + "LLM2Vec-Meta-Llama-3-supervised": { + "BitextMining": { + "f1": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "AmazonCounterfactualClassification (en)": 79.94, + "AmazonPolarityClassification": 86.07, + "AmazonReviewsClassification (en)": 46.84, + "Banking77Classification": 88.05, + "EmotionClassification": 51.2, + "ImdbClassification": 82.94, + "MTOPDomainClassification (en)": 96.14, + "MTOPIntentClassification (en)": 86.11, + "MassiveIntentClassification (en)": 79.8, + "MassiveScenarioClassification (en)": 81.52, + "ToxicConversationsClassification": 70.59, + "TweetSentimentExtractionClassification": 61.9 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "ArxivClusteringP2P": 44.27, + "ArxivClusteringS2S": 46.85, + "BiorxivClusteringP2P": 32.35, + "BiorxivClusteringS2S": 36.7, + "MedrxivClusteringP2P": 30.71, + "MedrxivClusteringS2S": 32.96, + "RedditClustering": 61.72, + "RedditClusteringP2P": 63.98, + "StackExchangeClustering": 72.74, + "StackExchangeClusteringP2P": 32.26, + "TwentyNewsgroupsClustering": 56.41 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "SprintDuplicateQuestions": 95.09, + "TwitterSemEval2015": 81.73, + "TwitterURLCorpus": 86.56 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "AskUbuntuDupQuestions": 65.19, + "MindSmallReranking": 32.67, + "SciDocsRR": 86.05, + "StackOverflowDupQuestions": 54.82 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "ArguAna": 62.78, + "CQADupstackRetrieval": 48.25, + "ClimateFEVER": 34.27, + "DBPedia": 48.34, + "FEVER": 90.2, + "FiQA2018": 55.33, + "HotpotQA": 71.76, + "MSMARCO": 43.24, + "NFCorpus": 41.83, + "NQ": 64.21, + "QuoraRetrieval": 87.16, + "SCIDOCS": 22.96, + "SciFact": 78.22, + "TRECCOVID": 80.34, + "Touche2020": 20.5 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "BIOSSES": 84.92, + "SICK-R": 83.94, + "STS12": 79.27, + "STS13": 84.83, + "STS14": 82.94, + "STS15": 88.09, + "STS16": 86.54, + "STS17 (en-en)": 89.58, + "STS22 (en)": 67.67, + "STSBenchmark": 88.05 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised", + "SummEval": 30.94 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "LLM2Vec-Meta-Llama-3-supervised" + } + ] + } + }, + "gtr-t5-xl": { + "BitextMining": { + "f1": [ + { + "Model": "gtr-t5-xl", + "BUCC (de-en)": 90.99, + "BUCC (fr-en)": 88.55, + "BUCC (ru-en)": 2.07, + "BUCC (zh-en)": 1.49, + "Tatoeba (afr-eng)": 33.47, + "Tatoeba (amh-eng)": 0.01, + "Tatoeba (ang-eng)": 30.74, + "Tatoeba (ara-eng)": 0.47, + "Tatoeba (arq-eng)": 0.34, + "Tatoeba (arz-eng)": 0.14, + "Tatoeba (ast-eng)": 51.74, + "Tatoeba (awa-eng)": 0.49, + "Tatoeba (aze-eng)": 7.43, + "Tatoeba (bel-eng)": 3.45, + "Tatoeba (ben-eng)": 0.06, + "Tatoeba (ber-eng)": 5.79, + "Tatoeba (bos-eng)": 17.43, + "Tatoeba (bre-eng)": 5.69, + "Tatoeba (bul-eng)": 7.55, + "Tatoeba (cat-eng)": 48.06, + "Tatoeba (cbk-eng)": 54.56, + "Tatoeba 
(ceb-eng)": 8.72, + "Tatoeba (ces-eng)": 8.76, + "Tatoeba (cha-eng)": 27.56, + "Tatoeba (cmn-eng)": 2.26, + "Tatoeba (cor-eng)": 3.69, + "Tatoeba (csb-eng)": 13.18, + "Tatoeba (cym-eng)": 6.97, + "Tatoeba (dan-eng)": 47.36, + "Tatoeba (deu-eng)": 91.54, + "Tatoeba (dsb-eng)": 13.2, + "Tatoeba (dtp-eng)": 4.54, + "Tatoeba (ell-eng)": 0.55, + "Tatoeba (epo-eng)": 27.86, + "Tatoeba (est-eng)": 5.13, + "Tatoeba (eus-eng)": 10.23, + "Tatoeba (fao-eng)": 21.44, + "Tatoeba (fin-eng)": 6.62, + "Tatoeba (fra-eng)": 79.66, + "Tatoeba (fry-eng)": 32.92, + "Tatoeba (gla-eng)": 2.87, + "Tatoeba (gle-eng)": 3.26, + "Tatoeba (glg-eng)": 63.81, + "Tatoeba (gsw-eng)": 29.71, + "Tatoeba (heb-eng)": 0.33, + "Tatoeba (hin-eng)": 0.25, + "Tatoeba (hrv-eng)": 17.16, + "Tatoeba (hsb-eng)": 12.02, + "Tatoeba (hun-eng)": 7.21, + "Tatoeba (hye-eng)": 0.78, + "Tatoeba (ido-eng)": 40.83, + "Tatoeba (ile-eng)": 54.95, + "Tatoeba (ina-eng)": 72.28, + "Tatoeba (ind-eng)": 30.95, + "Tatoeba (isl-eng)": 11.29, + "Tatoeba (ita-eng)": 73.83, + "Tatoeba (jav-eng)": 8.66, + "Tatoeba (jpn-eng)": 0.61, + "Tatoeba (kab-eng)": 1.78, + "Tatoeba (kat-eng)": 0.79, + "Tatoeba (kaz-eng)": 0.95, + "Tatoeba (khm-eng)": 0.49, + "Tatoeba (kor-eng)": 1.87, + "Tatoeba (kur-eng)": 10.91, + "Tatoeba (kzj-eng)": 5.72, + "Tatoeba (lat-eng)": 18.24, + "Tatoeba (lfn-eng)": 43.49, + "Tatoeba (lit-eng)": 7.13, + "Tatoeba (lvs-eng)": 7.04, + "Tatoeba (mal-eng)": 0.44, + "Tatoeba (mar-eng)": 0.03, + "Tatoeba (max-eng)": 18.99, + "Tatoeba (mhr-eng)": 1.11, + "Tatoeba (mkd-eng)": 2.49, + "Tatoeba (mon-eng)": 2.01, + "Tatoeba (nds-eng)": 39.96, + "Tatoeba (nld-eng)": 58.86, + "Tatoeba (nno-eng)": 29.07, + "Tatoeba (nob-eng)": 40.25, + "Tatoeba (nov-eng)": 50.19, + "Tatoeba (oci-eng)": 30.72, + "Tatoeba (orv-eng)": 0.85, + "Tatoeba (pam-eng)": 7.21, + "Tatoeba (pes-eng)": 0.53, + "Tatoeba (pms-eng)": 31.07, + "Tatoeba (pol-eng)": 18.06, + "Tatoeba (por-eng)": 81.92, + "Tatoeba (ron-eng)": 62.6, + "Tatoeba (rus-eng)": 22.24, + "Tatoeba (slk-eng)": 10.59, + "Tatoeba (slv-eng)": 11.4, + "Tatoeba (spa-eng)": 85.78, + "Tatoeba (sqi-eng)": 14.92, + "Tatoeba (srp-eng)": 9.87, + "Tatoeba (swe-eng)": 55.08, + "Tatoeba (swg-eng)": 32.66, + "Tatoeba (swh-eng)": 7.64, + "Tatoeba (tam-eng)": 0.49, + "Tatoeba (tat-eng)": 1.28, + "Tatoeba (tel-eng)": 0.45, + "Tatoeba (tgl-eng)": 23.63, + "Tatoeba (tha-eng)": 0.61, + "Tatoeba (tuk-eng)": 5.71, + "Tatoeba (tur-eng)": 8.25, + "Tatoeba (tzl-eng)": 28.4, + "Tatoeba (uig-eng)": 0.57, + "Tatoeba (ukr-eng)": 5.69, + "Tatoeba (urd-eng)": 0.0, + "Tatoeba (uzb-eng)": 4.19, + "Tatoeba (vie-eng)": 9.07, + "Tatoeba (war-eng)": 12.31, + "Tatoeba (wuu-eng)": 1.38, + "Tatoeba (xho-eng)": 7.6, + "Tatoeba (yid-eng)": 0.41, + "Tatoeba (yue-eng)": 1.31, + "Tatoeba (zsm-eng)": 29.74 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gtr-t5-xl", + "AmazonCounterfactualClassification (de)": 59.79, + "AmazonCounterfactualClassification (en)": 68.6, + "AmazonCounterfactualClassification (en-ext)": 69.03, + "AmazonCounterfactualClassification (ja)": 50.59, + "AmazonPolarityClassification": 74.58, + "AmazonReviewsClassification (de)": 35.06, + "AmazonReviewsClassification (en)": 38.2, + "AmazonReviewsClassification (es)": 37.18, + "AmazonReviewsClassification (fr)": 35.48, + "AmazonReviewsClassification (ja)": 22.24, + "AmazonReviewsClassification (zh)": 21.89, + "Banking77Classification": 82.22, + "EmotionClassification": 45.54, + "ImdbClassification": 68.15, + "MTOPDomainClassification (de)": 85.42, + "MTOPDomainClassification 
(en)": 93.6, + "MTOPDomainClassification (es)": 88.2, + "MTOPDomainClassification (fr)": 85.05, + "MTOPDomainClassification (hi)": 21.74, + "MTOPDomainClassification (th)": 15.87, + "MTOPIntentClassification (de)": 55.75, + "MTOPIntentClassification (en)": 65.93, + "MTOPIntentClassification (es)": 57.73, + "MTOPIntentClassification (fr)": 51.07, + "MTOPIntentClassification (hi)": 3.19, + "MTOPIntentClassification (th)": 5.55, + "MassiveIntentClassification (af)": 42.6, + "MassiveIntentClassification (am)": 2.12, + "MassiveIntentClassification (ar)": 4.64, + "MassiveIntentClassification (az)": 35.05, + "MassiveIntentClassification (bn)": 2.84, + "MassiveIntentClassification (cy)": 36.19, + "MassiveIntentClassification (da)": 48.42, + "MassiveIntentClassification (de)": 55.49, + "MassiveIntentClassification (el)": 10.14, + "MassiveIntentClassification (en)": 70.23, + "MassiveIntentClassification (es)": 56.72, + "MassiveIntentClassification (fa)": 3.54, + "MassiveIntentClassification (fi)": 37.13, + "MassiveIntentClassification (fr)": 57.67, + "MassiveIntentClassification (he)": 2.56, + "MassiveIntentClassification (hi)": 3.24, + "MassiveIntentClassification (hu)": 34.22, + "MassiveIntentClassification (hy)": 3.01, + "MassiveIntentClassification (id)": 46.54, + "MassiveIntentClassification (is)": 34.77, + "MassiveIntentClassification (it)": 54.13, + "MassiveIntentClassification (ja)": 4.27, + "MassiveIntentClassification (jv)": 36.97, + "MassiveIntentClassification (ka)": 2.72, + "MassiveIntentClassification (km)": 5.35, + "MassiveIntentClassification (kn)": 3.17, + "MassiveIntentClassification (ko)": 2.64, + "MassiveIntentClassification (lv)": 36.32, + "MassiveIntentClassification (ml)": 3.18, + "MassiveIntentClassification (mn)": 22.85, + "MassiveIntentClassification (ms)": 42.87, + "MassiveIntentClassification (my)": 4.04, + "MassiveIntentClassification (nb)": 45.87, + "MassiveIntentClassification (nl)": 49.53, + "MassiveIntentClassification (pl)": 42.64, + "MassiveIntentClassification (pt)": 57.03, + "MassiveIntentClassification (ro)": 49.95, + "MassiveIntentClassification (ru)": 36.58, + "MassiveIntentClassification (sl)": 39.44, + "MassiveIntentClassification (sq)": 41.78, + "MassiveIntentClassification (sv)": 47.95, + "MassiveIntentClassification (sw)": 35.85, + "MassiveIntentClassification (ta)": 2.32, + "MassiveIntentClassification (te)": 2.2, + "MassiveIntentClassification (th)": 3.74, + "MassiveIntentClassification (tl)": 43.12, + "MassiveIntentClassification (tr)": 35.24, + "MassiveIntentClassification (ur)": 3.0, + "MassiveIntentClassification (vi)": 30.01, + "MassiveIntentClassification (zh-CN)": 1.72, + "MassiveIntentClassification (zh-TW)": 3.35, + "MassiveScenarioClassification (af)": 52.54, + "MassiveScenarioClassification (am)": 6.3, + "MassiveScenarioClassification (ar)": 11.96, + "MassiveScenarioClassification (az)": 40.17, + "MassiveScenarioClassification (bn)": 8.29, + "MassiveScenarioClassification (cy)": 42.24, + "MassiveScenarioClassification (da)": 57.28, + "MassiveScenarioClassification (de)": 68.09, + "MassiveScenarioClassification (el)": 16.66, + "MassiveScenarioClassification (en)": 75.94, + "MassiveScenarioClassification (es)": 64.32, + "MassiveScenarioClassification (fa)": 6.9, + "MassiveScenarioClassification (fi)": 43.96, + "MassiveScenarioClassification (fr)": 66.72, + "MassiveScenarioClassification (he)": 7.51, + "MassiveScenarioClassification (hi)": 7.82, + "MassiveScenarioClassification (hu)": 42.16, + "MassiveScenarioClassification (hy)": 9.33, + 
"MassiveScenarioClassification (id)": 53.54, + "MassiveScenarioClassification (is)": 42.84, + "MassiveScenarioClassification (it)": 62.44, + "MassiveScenarioClassification (ja)": 7.29, + "MassiveScenarioClassification (jv)": 43.13, + "MassiveScenarioClassification (ka)": 7.63, + "MassiveScenarioClassification (km)": 9.08, + "MassiveScenarioClassification (kn)": 8.1, + "MassiveScenarioClassification (ko)": 6.35, + "MassiveScenarioClassification (lv)": 40.24, + "MassiveScenarioClassification (ml)": 7.65, + "MassiveScenarioClassification (mn)": 27.98, + "MassiveScenarioClassification (ms)": 52.41, + "MassiveScenarioClassification (my)": 9.21, + "MassiveScenarioClassification (nb)": 54.44, + "MassiveScenarioClassification (nl)": 60.35, + "MassiveScenarioClassification (pl)": 49.97, + "MassiveScenarioClassification (pt)": 62.78, + "MassiveScenarioClassification (ro)": 59.62, + "MassiveScenarioClassification (ru)": 43.44, + "MassiveScenarioClassification (sl)": 44.79, + "MassiveScenarioClassification (sq)": 50.84, + "MassiveScenarioClassification (sv)": 58.21, + "MassiveScenarioClassification (sw)": 44.63, + "MassiveScenarioClassification (ta)": 7.95, + "MassiveScenarioClassification (te)": 7.5, + "MassiveScenarioClassification (th)": 8.79, + "MassiveScenarioClassification (tl)": 53.54, + "MassiveScenarioClassification (tr)": 42.47, + "MassiveScenarioClassification (ur)": 9.58, + "MassiveScenarioClassification (vi)": 34.68, + "MassiveScenarioClassification (zh-CN)": 5.21, + "MassiveScenarioClassification (zh-TW)": 8.77, + "ToxicConversationsClassification": 67.56, + "TweetSentimentExtractionClassification": 54.77 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gtr-t5-xl", + "ArxivClusteringP2P": 37.9, + "ArxivClusteringS2S": 30.45, + "BiorxivClusteringP2P": 30.52, + "BiorxivClusteringS2S": 26.06, + "MedrxivClusteringP2P": 28.69, + "MedrxivClusteringS2S": 26.69, + "RedditClustering": 61.34, + "RedditClusteringP2P": 61.11, + "StackExchangeClustering": 69.95, + "StackExchangeClusteringP2P": 32.73, + "TwentyNewsgroupsClustering": 51.15 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gtr-t5-xl", + "SprintDuplicateQuestions": 95.45, + "TwitterSemEval2015": 77.81, + "TwitterURLCorpus": 85.14 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gtr-t5-xl", + "AskUbuntuDupQuestions": 63.08, + "MindSmallReranking": 31.5, + "SciDocsRR": 76.49, + "StackOverflowDupQuestions": 52.79 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "gtr-t5-xl", + "ArguAna": 52.81, + "CQADupstackRetrieval": 37.35, + "ClimateFEVER": 27.01, + "DBPedia": 39.74, + "FEVER": 72.18, + "FiQA2018": 44.19, + "HotpotQA": 58.91, + "MSMARCO": 43.52, + "NFCorpus": 33.34, + "NQ": 56.16, + "QuoraRetrieval": 88.91, + "SCIDOCS": 15.71, + "SciFact": 64.2, + "TRECCOVID": 60.09, + "Touche2020": 25.26 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gtr-t5-xl", + "BIOSSES": 78.94, + "SICK-R": 73.63, + "STS12": 69.11, + "STS13": 81.82, + "STS14": 77.07, + "STS15": 86.01, + "STS16": 82.23, + "STS17 (ar-ar)": 9.06, + "STS17 (en-ar)": -3.22, + "STS17 (en-de)": 70.38, + "STS17 (en-en)": 84.9, + "STS17 (en-tr)": 17.17, + "STS17 (es-en)": 60.24, + "STS17 (es-es)": 81.93, + "STS17 (fr-en)": 62.17, + "STS17 (it-en)": 59.11, + "STS17 (ko-ko)": 8.9, + "STS17 (nl-en)": 56.91, + "STS22 (ar)": 37.66, + "STS22 (de)": 50.58, + "STS22 (de-en)": 53.63, + "STS22 (de-fr)": 55.72, + "STS22 (de-pl)": 27.99, + "STS22 (en)": 66.61, + "STS22 (es)": 59.14, + "STS22 (es-en)": 69.99, + "STS22 (es-it)": 60.94, + "STS22 
(fr)": 79.43, + "STS22 (fr-pl)": 61.98, + "STS22 (it)": 67.14, + "STS22 (pl)": 33.74, + "STS22 (pl-en)": 60.18, + "STS22 (ru)": 32.69, + "STS22 (tr)": 55.79, + "STS22 (zh)": 31.16, + "STS22 (zh-en)": 28.85, + "STSBenchmark": 77.65 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gtr-t5-xl", + "SummEval": 30.21 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gtr-t5-xl" + } + ] + } + }, + "herbert-base-retrieval-v2": { + "BitextMining": { + "f1": [ + { + "Model": "herbert-base-retrieval-v2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "herbert-base-retrieval-v2", + "AllegroReviews": 34.11, + "CBD": 68.35, + "MassiveIntentClassification (pl)": 65.53, + "MassiveScenarioClassification (pl)": 68.51, + "PAC": 68.4, + "PolEmo2.0-IN": 64.18, + "PolEmo2.0-OUT": 45.73 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "herbert-base-retrieval-v2", + "8TagsClustering": 28.15 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "herbert-base-retrieval-v2", + "CDSC-E": 63.31, + "PPC": 84.18, + "PSC": 98.87, + "SICK-E-PL": 54.93 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "herbert-base-retrieval-v2" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "herbert-base-retrieval-v2", + "ArguAna-PL": 41.97, + "DBPedia-PL": 24.07, + "FiQA-PL": 24.25, + "HotpotQA-PL": 43.41, + "MSMARCO-PL": 51.56, + "NFCorpus-PL": 25.95, + "NQ-PL": 35.09, + "Quora-PL": 78.86, + "SCIDOCS-PL": 11.0, + "SciFact-PL": 51.92, + "TRECCOVID-PL": 42.64 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "herbert-base-retrieval-v2", + "CDSC-R": 86.18, + "SICK-R-PL": 64.67, + "STS22 (pl)": 39.73 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "herbert-base-retrieval-v2" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "herbert-base-retrieval-v2" + } + ] + } + }, + "LLM2Vec-Meta-Llama-3-unsupervised": { + "BitextMining": { + "f1": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "AmazonCounterfactualClassification (en)": 75.7, + "AmazonPolarityClassification": 80.68, + "AmazonReviewsClassification (en)": 40.0, + "Banking77Classification": 84.77, + "EmotionClassification": 47.08, + "ImdbClassification": 75.19, + "MTOPDomainClassification (en)": 94.47, + "MTOPIntentClassification (en)": 81.09, + "MassiveIntentClassification (en)": 75.01, + "MassiveScenarioClassification (en)": 79.16, + "ToxicConversationsClassification": 71.85, + "TweetSentimentExtractionClassification": 57.61 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "ArxivClusteringP2P": 49.22, + "ArxivClusteringS2S": 41.71, + "BiorxivClusteringP2P": 38.39, + "BiorxivClusteringS2S": 31.31, + "MedrxivClusteringP2P": 31.47, + "MedrxivClusteringS2S": 27.87, + "RedditClustering": 43.67, + "RedditClusteringP2P": 61.67, + "StackExchangeClustering": 68.2, + "StackExchangeClusteringP2P": 36.36, + "TwentyNewsgroupsClustering": 32.01 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "SprintDuplicateQuestions": 88.14, + "TwitterSemEval2015": 66.6, + "TwitterURLCorpus": 79.3 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "AskUbuntuDupQuestions": 57.16, + "MindSmallReranking": 30.1, + "SciDocsRR": 76.28, + "StackOverflowDupQuestions": 48.82 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + 
"Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "ArguAna": 51.73, + "CQADupstackRetrieval": 32.4, + "ClimateFEVER": 23.58, + "DBPedia": 26.78, + "FEVER": 53.42, + "FiQA2018": 28.56, + "HotpotQA": 52.37, + "MSMARCO": 17.47, + "NFCorpus": 26.28, + "NQ": 37.65, + "QuoraRetrieval": 84.64, + "SCIDOCS": 10.39, + "SciFact": 66.36, + "TRECCOVID": 63.34, + "Touche2020": 12.82 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "BIOSSES": 84.67, + "SICK-R": 72.16, + "STS12": 61.6, + "STS13": 79.71, + "STS14": 72.11, + "STS15": 82.18, + "STS16": 79.41, + "STS17 (en-en)": 85.44, + "STS22 (en)": 63.9, + "STSBenchmark": 77.44 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised", + "SummEval": 31.45 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "LLM2Vec-Meta-Llama-3-unsupervised" + } + ] + } + }, + "unsup-simcse-bert-base-uncased": { + "BitextMining": { + "f1": [ + { + "Model": "unsup-simcse-bert-base-uncased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "AmazonCounterfactualClassification (en)": 67.09, + "AmazonPolarityClassification": 74.48, + "AmazonReviewsClassification (en)": 33.85, + "Banking77Classification": 73.55, + "EmotionClassification": 42.22, + "ImdbClassification": 69.63, + "MTOPDomainClassification (en)": 81.71, + "MTOPIntentClassification (en)": 59.23, + "MassiveIntentClassification (en)": 59.84, + "MassiveScenarioClassification (en)": 66.25, + "ToxicConversationsClassification": 68.82, + "TweetSentimentExtractionClassification": 53.36 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "ArxivClusteringP2P": 32.61, + "ArxivClusteringS2S": 24.68, + "BiorxivClusteringP2P": 24.9, + "BiorxivClusteringS2S": 19.55, + "MedrxivClusteringP2P": 23.6, + "MedrxivClusteringS2S": 21.97, + "RedditClustering": 32.18, + "RedditClusteringP2P": 45.14, + "StackExchangeClustering": 43.07, + "StackExchangeClusteringP2P": 28.5, + "TwentyNewsgroupsClustering": 23.21 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "SprintDuplicateQuestions": 69.41, + "TwitterSemEval2015": 60.21, + "TwitterURLCorpus": 81.37 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "AskUbuntuDupQuestions": 51.57, + "MindSmallReranking": 28.62, + "SciDocsRR": 66.33, + "StackOverflowDupQuestions": 39.35 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "ArguAna": 38.34, + "CQADupstackRetrieval": 13.22, + "ClimateFEVER": 11.8, + "DBPedia": 15.04, + "FEVER": 21.06, + "FiQA2018": 9.84, + "HotpotQA": 19.75, + "MSMARCO": 9.35, + "NFCorpus": 9.88, + "NQ": 11.69, + "QuoraRetrieval": 78.03, + "SCIDOCS": 5.5, + "SciFact": 25.72, + "TRECCOVID": 26.2, + "Touche2020": 8.9 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "BIOSSES": 72.31, + "SICK-R": 72.24, + "STS12": 66.05, + "STS13": 81.49, + "STS14": 73.61, + "STS15": 79.72, + "STS16": 78.12, + "STS17 (en-en)": 83.58, + "STS22 (en)": 59.65, + "STSBenchmark": 76.52 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "unsup-simcse-bert-base-uncased", + "SummEval": 31.15 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "unsup-simcse-bert-base-uncased" + } + ] + } + }, + "flan-t5-large": { + "BitextMining": { + "f1": [ + { + "Model": "flan-t5-large" + } + ] + }, + 
"Classification": { + "accuracy": [ + { + "Model": "flan-t5-large" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "flan-t5-large" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "flan-t5-large" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "flan-t5-large" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "flan-t5-large" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "flan-t5-large" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "flan-t5-large" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "flan-t5-large", + "Core17InstructionRetrieval": 1.32, + "News21InstructionRetrieval": 8.95, + "Robust04InstructionRetrieval": 3.9 + } + ] + } + }, + "sentence-camembert-base": { + "BitextMining": { + "f1": [ + { + "Model": "sentence-camembert-base" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "sentence-camembert-base", + "AmazonReviewsClassification (fr)": 36.03, + "MTOPDomainClassification (fr)": 77.1, + "MTOPIntentClassification (fr)": 43.44, + "MasakhaNEWSClassification (fra)": 70.36, + "MassiveIntentClassification (fr)": 51.59, + "MassiveScenarioClassification (fr)": 61.28 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "sentence-camembert-base", + "AlloProfClusteringP2P": 59.09, + "AlloProfClusteringS2S": 38.92, + "HALClusteringS2S": 20.22, + "MLSUMClusteringP2P": 35.98, + "MLSUMClusteringS2S": 27.05, + "MasakhaNEWSClusteringP2P (fra)": 36.03, + "MasakhaNEWSClusteringS2S (fra)": 30.77 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "sentence-camembert-base", + "OpusparcusPC (fr)": 92.05, + "PawsXPairClassification (fr)": 57.44 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "sentence-camembert-base", + "AlloprofReranking": 48.68, + "SyntecReranking": 79.75 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "sentence-camembert-base", + "AlloprofRetrieval": 21.94, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 13.36, + "SyntecRetrieval": 68.62, + "XPQARetrieval (fr)": 57.92 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "sentence-camembert-base", + "SICKFr": 74.18, + "STS22 (fr)": 77.54, + "STSBenchmarkMultilingualSTS (fr)": 81.64 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "sentence-camembert-base", + "SummEvalFr": 28.77 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "sentence-camembert-base" + } + ] + } + }, + "bge-base-en-v1.5-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bge-base-en-v1.5-instruct", + "ARCChallenge": 8.85, + "AlphaNLI": 4.13, + "HellaSwag": 24.03, + "PIQA": 23.03, + "Quail": 1.25, + "RARbCode": 46.32, + "RARbMath": 45.62, + "SIQA": 0.24, + "SpartQA": 2.67, + "TempReasonL1": 0.8, + "TempReasonL2Fact": 16.56, + "TempReasonL2Pure": 1.33, + "TempReasonL3Fact": 12.68, + "TempReasonL3Pure": 5.08, + "WinoGrande": 10.27 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + }, 
+ "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bge-base-en-v1.5-instruct" + } + ] + } + }, + "glove.6B.300d": { + "BitextMining": { + "f1": [ + { + "Model": "glove.6B.300d", + "BUCC (de-en)": 0.18, + "BUCC (fr-en)": 0.19, + "BUCC (ru-en)": 0.1, + "BUCC (zh-en)": 0.0 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "glove.6B.300d", + "AmazonCounterfactualClassification (en)": 56.91, + "AmazonPolarityClassification": 60.32, + "AmazonReviewsClassification (en)": 29.67, + "Banking77Classification": 67.69, + "EmotionClassification": 36.93, + "ImdbClassification": 62.57, + "MTOPDomainClassification (en)": 79.11, + "MTOPIntentClassification (en)": 55.85, + "MassiveIntentClassification (en)": 56.19, + "MassiveScenarioClassification (en)": 66.03, + "ToxicConversationsClassification": 65.4, + "TweetSentimentExtractionClassification": 50.8 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "glove.6B.300d", + "ArxivClusteringP2P": 32.56, + "ArxivClusteringS2S": 23.14, + "BiorxivClusteringP2P": 29.27, + "BiorxivClusteringS2S": 19.18, + "MedrxivClusteringP2P": 26.12, + "MedrxivClusteringS2S": 20.38, + "RedditClustering": 28.46, + "RedditClusteringP2P": 35.82, + "StackExchangeClustering": 35.8, + "StackExchangeClusteringP2P": 28.51, + "TwentyNewsgroupsClustering": 25.83 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "glove.6B.300d", + "SprintDuplicateQuestions": 86.96, + "TwitterSemEval2015": 48.45, + "TwitterURLCorpus": 77.35 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "glove.6B.300d", + "AskUbuntuDupQuestions": 49.57, + "MindSmallReranking": 27.01, + "SciDocsRR": 62.56, + "StackOverflowDupQuestions": 34.03 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "glove.6B.300d", + "ArguAna": 36.3, + "CQADupstackRetrieval": 15.47, + "ClimateFEVER": 14.44, + "DBPedia": 18.28, + "FEVER": 14.99, + "FiQA2018": 10.09, + "HotpotQA": 19.18, + "MSMARCO": 9.6, + "NFCorpus": 13.87, + "NQ": 12.87, + "QuoraRetrieval": 71.32, + "SCIDOCS": 8.04, + "SciFact": 29.58, + "TRECCOVID": 36.22, + "Touche2020": 13.99 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "glove.6B.300d", + "BIOSSES": 44.93, + "SICK-R": 55.43, + "STS12": 54.64, + "STS13": 69.16, + "STS14": 60.81, + "STS15": 72.31, + "STS16": 65.34, + "STS17 (en-en)": 77.95, + "STS22 (en)": 56.35, + "STSBenchmark": 61.54 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "glove.6B.300d", + "SummEval": 28.87 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "glove.6B.300d" + } + ] + } + }, + "gottbert-base": { + "BitextMining": { + "f1": [ + { + "Model": "gottbert-base" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gottbert-base" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gottbert-base", + "BlurbsClusteringP2P": 34.49, + "BlurbsClusteringS2S": 8.37, + "TenKGnadClusteringP2P": 33.66, + "TenKGnadClusteringS2S": 9.34 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gottbert-base" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gottbert-base" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "gottbert-base" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gottbert-base" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gottbert-base" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gottbert-base" + } + ] + } + }, + "flaubert_base_cased": { + "BitextMining": { + "f1": [ + { + "Model": "flaubert_base_cased" + } + ] + }, + "Classification": { 
+ "accuracy": [ + { + "Model": "flaubert_base_cased", + "AmazonReviewsClassification (fr)": 24.9, + "MTOPDomainClassification (fr)": 25.55, + "MTOPIntentClassification (fr)": 9.49, + "MasakhaNEWSClassification (fra)": 71.14, + "MassiveIntentClassification (fr)": 6.98, + "MassiveScenarioClassification (fr)": 11.41 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "flaubert_base_cased", + "AlloProfClusteringP2P": 52.86, + "AlloProfClusteringS2S": 14.46, + "HALClusteringS2S": 3.85, + "MLSUMClusteringP2P": 39.06, + "MLSUMClusteringS2S": 17.13, + "MasakhaNEWSClusteringP2P (fra)": 41.61, + "MasakhaNEWSClusteringS2S (fra)": 21.26 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "flaubert_base_cased", + "OpusparcusPC (fr)": 82.15, + "PawsXPairClassification (fr)": 51.89 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "flaubert_base_cased", + "AlloprofReranking": 34.81, + "SyntecReranking": 55.88 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "flaubert_base_cased", + "AlloprofRetrieval": 1.63, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 0.58, + "SyntecRetrieval": 20.56, + "XPQARetrieval (fr)": 6.59 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "flaubert_base_cased", + "SICKFr": 53.86, + "STS22 (fr)": 65.37, + "STSBenchmarkMultilingualSTS (fr)": 37.14 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "flaubert_base_cased", + "SummEvalFr": 31.26 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "flaubert_base_cased" + } + ] + } + }, + "e5-base-4k": { + "BitextMining": { + "f1": [ + { + "Model": "e5-base-4k" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "e5-base-4k" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "e5-base-4k" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "e5-base-4k" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "e5-base-4k" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "e5-base-4k", + "LEMBNarrativeQARetrieval": 30.35, + "LEMBNeedleRetrieval": 41.5, + "LEMBPasskeyRetrieval": 67.25, + "LEMBQMSumRetrieval": 35.6, + "LEMBSummScreenFDRetrieval": 95.23, + "LEMBWikimQARetrieval": 69.19 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "e5-base-4k" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "e5-base-4k" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "e5-base-4k" + } + ] + } + }, + "gte-Qwen1.5-7B-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "gte-Qwen1.5-7B-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "AmazonCounterfactualClassification (en)": 83.16, + "AmazonPolarityClassification": 96.7, + "AmazonReviewsClassification (en)": 62.17, + "AmazonReviewsClassification (zh)": 52.95, + "Banking77Classification": 81.68, + "EmotionClassification": 54.53, + "IFlyTek": 53.77, + "ImdbClassification": 95.58, + "JDReview": 88.2, + "MTOPDomainClassification (en)": 95.75, + "MTOPIntentClassification (en)": 84.26, + "MassiveIntentClassification (zh-CN)": 76.25, + "MassiveIntentClassification (en)": 78.47, + "MassiveScenarioClassification (en)": 78.19, + "MassiveScenarioClassification (zh-CN)": 77.26, + "MultilingualSentiment": 77.42, + "OnlineShopping": 94.48, + "TNews": 51.24, + "ToxicConversationsClassification": 78.75, + "TweetSentimentExtractionClassification": 66.0, + "Waimai": 88.63 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "ArxivClusteringP2P": 56.4, + 
"ArxivClusteringS2S": 51.45, + "BiorxivClusteringP2P": 49.01, + "BiorxivClusteringS2S": 45.06, + "CLSClusteringP2P": 47.21, + "CLSClusteringS2S": 45.79, + "MedrxivClusteringP2P": 44.37, + "MedrxivClusteringS2S": 42.0, + "RedditClustering": 73.37, + "RedditClusteringP2P": 72.51, + "StackExchangeClustering": 79.07, + "StackExchangeClusteringP2P": 49.57, + "ThuNewsClusteringP2P": 87.43, + "ThuNewsClusteringS2S": 87.9, + "TwentyNewsgroupsClustering": 51.31 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "Cmnli": 91.81, + "Ocnli": 85.22, + "SprintDuplicateQuestions": 95.99, + "TwitterSemEval2015": 79.36, + "TwitterURLCorpus": 86.79 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "AskUbuntuDupQuestions": 66.0, + "CMedQAv1": 86.37, + "CMedQAv2": 87.41, + "MindSmallReranking": 32.71, + "SciDocsRR": 87.89, + "StackOverflowDupQuestions": 53.93, + "T2Reranking": 68.11 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "ArguAna": 62.65, + "BrightRetrieval (stackoverflow)": 19.85, + "BrightRetrieval (earth_science)": 36.22, + "BrightRetrieval (leetcode)": 25.46, + "BrightRetrieval (theoremqa_questions)": 26.97, + "BrightRetrieval (economics)": 17.72, + "BrightRetrieval (robotics)": 13.47, + "BrightRetrieval (pony)": 9.79, + "BrightRetrieval (aops)": 14.36, + "BrightRetrieval (psychology)": 24.61, + "BrightRetrieval (theoremqa_theorems)": 26.66, + "BrightRetrieval (biology)": 30.92, + "BrightRetrieval (sustainable_living)": 14.93, + "CQADupstackRetrieval": 40.64, + "ClimateFEVER": 44.0, + "CmedqaRetrieval": 43.47, + "CovidRetrieval": 80.87, + "DBPedia": 48.04, + "DuRetrieval": 86.01, + "EcomRetrieval": 66.46, + "FEVER": 93.35, + "FiQA2018": 55.31, + "HotpotQA": 72.25, + "MMarcoRetrieval": 73.83, + "MSMARCO": 41.68, + "MedicalRetrieval": 61.33, + "NFCorpus": 38.25, + "NQ": 61.79, + "QuoraRetrieval": 89.61, + "SCIDOCS": 27.69, + "SciFact": 75.31, + "T2Retrieval": 83.58, + "TRECCOVID": 72.72, + "Touche2020": 20.3, + "VideoRetrieval": 69.41 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "AFQMC": 58.47, + "ATEC": 55.46, + "BIOSSES": 81.12, + "BQ": 77.59, + "LCQMC": 76.29, + "PAWSX": 50.22, + "QBQTC": 31.82, + "SICK-R": 79.15, + "STS12": 76.52, + "STS13": 88.63, + "STS14": 83.32, + "STS15": 87.5, + "STS16": 86.39, + "STS17 (en-en)": 87.79, + "STS22 (en)": 66.4, + "STS22 (zh)": 67.36, + "STSB": 81.37, + "STSBenchmark": 87.35 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gte-Qwen1.5-7B-instruct", + "SummEval": 31.46 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gte-Qwen1.5-7B-instruct" + } + ] + } + }, + "LASER2": { + "BitextMining": { + "f1": [ + { + "Model": "LASER2", + "BUCC (de-en)": 99.21, + "BUCC (fr-en)": 98.39, + "BUCC (ru-en)": 97.62, + "BUCC (zh-en)": 97.7, + "Tatoeba (afr-eng)": 92.59, + "Tatoeba (amh-eng)": 80.82, + "Tatoeba (ang-eng)": 25.22, + "Tatoeba (ara-eng)": 90.14, + "Tatoeba (arq-eng)": 26.63, + "Tatoeba (arz-eng)": 66.16, + "Tatoeba (ast-eng)": 76.35, + "Tatoeba (awa-eng)": 33.74, + "Tatoeba (aze-eng)": 82.41, + "Tatoeba (bel-eng)": 79.54, + "Tatoeba (ben-eng)": 89.43, + "Tatoeba (ber-eng)": 77.63, + "Tatoeba (bos-eng)": 95.86, + "Tatoeba (bre-eng)": 31.2, + "Tatoeba (bul-eng)": 93.57, + "Tatoeba (cat-eng)": 95.8, + "Tatoeba (cbk-eng)": 77.17, + "Tatoeba (ceb-eng)": 9.93, + "Tatoeba (ces-eng)": 95.52, + "Tatoeba (cha-eng)": 14.86, + "Tatoeba (cmn-eng)": 85.62, + "Tatoeba (cor-eng)": 4.45, 
+ "Tatoeba (csb-eng)": 27.03, + "Tatoeba (cym-eng)": 5.85, + "Tatoeba (dan-eng)": 95.22, + "Tatoeba (deu-eng)": 99.07, + "Tatoeba (dsb-eng)": 42.34, + "Tatoeba (dtp-eng)": 7.39, + "Tatoeba (ell-eng)": 96.2, + "Tatoeba (epo-eng)": 96.61, + "Tatoeba (est-eng)": 96.43, + "Tatoeba (eus-eng)": 93.32, + "Tatoeba (fao-eng)": 57.04, + "Tatoeba (fin-eng)": 96.98, + "Tatoeba (fra-eng)": 94.28, + "Tatoeba (fry-eng)": 42.07, + "Tatoeba (gla-eng)": 1.52, + "Tatoeba (gle-eng)": 4.2, + "Tatoeba (glg-eng)": 96.14, + "Tatoeba (gsw-eng)": 27.52, + "Tatoeba (heb-eng)": 0.0, + "Tatoeba (hin-eng)": 95.32, + "Tatoeba (hrv-eng)": 96.72, + "Tatoeba (hsb-eng)": 45.75, + "Tatoeba (hun-eng)": 95.2, + "Tatoeba (hye-eng)": 88.72, + "Tatoeba (ido-eng)": 80.86, + "Tatoeba (ile-eng)": 87.88, + "Tatoeba (ina-eng)": 93.93, + "Tatoeba (ind-eng)": 92.98, + "Tatoeba (isl-eng)": 94.32, + "Tatoeba (ita-eng)": 94.32, + "Tatoeba (jav-eng)": 9.95, + "Tatoeba (jpn-eng)": 93.78, + "Tatoeba (kab-eng)": 65.88, + "Tatoeba (kat-eng)": 81.16, + "Tatoeba (kaz-eng)": 53.3, + "Tatoeba (khm-eng)": 74.19, + "Tatoeba (kor-eng)": 87.97, + "Tatoeba (kur-eng)": 19.09, + "Tatoeba (kzj-eng)": 4.46, + "Tatoeba (lat-eng)": 64.81, + "Tatoeba (lfn-eng)": 63.39, + "Tatoeba (lit-eng)": 96.2, + "Tatoeba (lvs-eng)": 95.33, + "Tatoeba (mal-eng)": 98.16, + "Tatoeba (mar-eng)": 92.93, + "Tatoeba (max-eng)": 36.96, + "Tatoeba (mhr-eng)": 6.86, + "Tatoeba (mkd-eng)": 93.63, + "Tatoeba (mon-eng)": 3.42, + "Tatoeba (nds-eng)": 77.13, + "Tatoeba (nld-eng)": 95.35, + "Tatoeba (nno-eng)": 72.75, + "Tatoeba (nob-eng)": 95.77, + "Tatoeba (nov-eng)": 60.02, + "Tatoeba (oci-eng)": 58.13, + "Tatoeba (orv-eng)": 23.24, + "Tatoeba (pam-eng)": 3.24, + "Tatoeba (pes-eng)": 93.13, + "Tatoeba (pms-eng)": 36.23, + "Tatoeba (pol-eng)": 97.32, + "Tatoeba (por-eng)": 94.54, + "Tatoeba (ron-eng)": 96.52, + "Tatoeba (rus-eng)": 92.58, + "Tatoeba (slk-eng)": 95.82, + "Tatoeba (slv-eng)": 95.4, + "Tatoeba (spa-eng)": 97.33, + "Tatoeba (sqi-eng)": 97.22, + "Tatoeba (srp-eng)": 93.64, + "Tatoeba (swe-eng)": 95.31, + "Tatoeba (swg-eng)": 33.1, + "Tatoeba (swh-eng)": 55.66, + "Tatoeba (tam-eng)": 87.32, + "Tatoeba (tat-eng)": 34.74, + "Tatoeba (tel-eng)": 96.72, + "Tatoeba (tgl-eng)": 63.19, + "Tatoeba (tha-eng)": 96.38, + "Tatoeba (tuk-eng)": 16.35, + "Tatoeba (tur-eng)": 98.03, + "Tatoeba (tzl-eng)": 36.56, + "Tatoeba (uig-eng)": 56.49, + "Tatoeba (ukr-eng)": 93.52, + "Tatoeba (urd-eng)": 84.23, + "Tatoeba (uzb-eng)": 23.2, + "Tatoeba (vie-eng)": 96.73, + "Tatoeba (war-eng)": 8.25, + "Tatoeba (wuu-eng)": 75.09, + "Tatoeba (xho-eng)": 4.68, + "Tatoeba (yid-eng)": 2.49, + "Tatoeba (yue-eng)": 87.75, + "Tatoeba (zsm-eng)": 95.41 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "LASER2", + "AmazonCounterfactualClassification (de)": 67.82, + "AmazonCounterfactualClassification (en)": 76.84, + "AmazonCounterfactualClassification (en-ext)": 76.17, + "AmazonCounterfactualClassification (ja)": 68.76, + "AmazonPolarityClassification": 61.01, + "AmazonReviewsClassification (de)": 31.07, + "AmazonReviewsClassification (en)": 28.71, + "AmazonReviewsClassification (es)": 32.72, + "AmazonReviewsClassification (fr)": 31.12, + "AmazonReviewsClassification (ja)": 28.94, + "AmazonReviewsClassification (zh)": 30.89, + "Banking77Classification": 57.76, + "EmotionClassification": 24.83, + "ImdbClassification": 57.58, + "MTOPDomainClassification (de)": 74.08, + "MTOPDomainClassification (en)": 75.36, + "MTOPDomainClassification (es)": 73.47, + "MTOPDomainClassification (fr)": 72.26, + 
"MTOPDomainClassification (hi)": 72.95, + "MTOPDomainClassification (th)": 72.68, + "MTOPIntentClassification (de)": 51.62, + "MTOPIntentClassification (en)": 49.47, + "MTOPIntentClassification (es)": 52.75, + "MTOPIntentClassification (fr)": 50.12, + "MTOPIntentClassification (hi)": 45.55, + "MTOPIntentClassification (th)": 50.07, + "MasakhaNEWSClassification (fra)": 65.9, + "MassiveIntentClassification (af)": 38.01, + "MassiveIntentClassification (am)": 12.7, + "MassiveIntentClassification (ar)": 37.16, + "MassiveIntentClassification (az)": 19.98, + "MassiveIntentClassification (bn)": 42.51, + "MassiveIntentClassification (cy)": 17.33, + "MassiveIntentClassification (da)": 45.61, + "MassiveIntentClassification (de)": 44.79, + "MassiveIntentClassification (el)": 46.71, + "MassiveIntentClassification (en)": 47.91, + "MassiveIntentClassification (es)": 45.44, + "MassiveIntentClassification (fa)": 45.01, + "MassiveIntentClassification (fi)": 45.94, + "MassiveIntentClassification (fr)": 46.13, + "MassiveIntentClassification (he)": 42.55, + "MassiveIntentClassification (hi)": 40.2, + "MassiveIntentClassification (hu)": 42.77, + "MassiveIntentClassification (hy)": 28.07, + "MassiveIntentClassification (id)": 45.81, + "MassiveIntentClassification (is)": 39.86, + "MassiveIntentClassification (it)": 48.25, + "MassiveIntentClassification (ja)": 45.3, + "MassiveIntentClassification (jv)": 24.3, + "MassiveIntentClassification (ka)": 22.7, + "MassiveIntentClassification (km)": 22.48, + "MassiveIntentClassification (kn)": 4.32, + "MassiveIntentClassification (ko)": 44.26, + "MassiveIntentClassification (lv)": 39.75, + "MassiveIntentClassification (ml)": 41.33, + "MassiveIntentClassification (mn)": 16.2, + "MassiveIntentClassification (ms)": 43.23, + "MassiveIntentClassification (my)": 25.37, + "MassiveIntentClassification (nb)": 37.74, + "MassiveIntentClassification (nl)": 45.0, + "MassiveIntentClassification (pl)": 44.99, + "MassiveIntentClassification (pt)": 48.55, + "MassiveIntentClassification (ro)": 44.3, + "MassiveIntentClassification (ru)": 44.29, + "MassiveIntentClassification (sl)": 44.72, + "MassiveIntentClassification (sq)": 46.12, + "MassiveIntentClassification (sv)": 45.95, + "MassiveIntentClassification (sw)": 31.89, + "MassiveIntentClassification (ta)": 29.63, + "MassiveIntentClassification (te)": 36.03, + "MassiveIntentClassification (th)": 43.39, + "MassiveIntentClassification (tl)": 29.73, + "MassiveIntentClassification (tr)": 43.93, + "MassiveIntentClassification (ur)": 26.11, + "MassiveIntentClassification (vi)": 44.33, + "MassiveIntentClassification (zh-CN)": 40.62, + "MassiveIntentClassification (zh-TW)": 32.93, + "MassiveScenarioClassification (af)": 47.1, + "MassiveScenarioClassification (am)": 17.7, + "MassiveScenarioClassification (ar)": 45.21, + "MassiveScenarioClassification (az)": 28.21, + "MassiveScenarioClassification (bn)": 50.52, + "MassiveScenarioClassification (cy)": 22.58, + "MassiveScenarioClassification (da)": 54.87, + "MassiveScenarioClassification (de)": 54.34, + "MassiveScenarioClassification (el)": 55.47, + "MassiveScenarioClassification (en)": 55.92, + "MassiveScenarioClassification (es)": 52.77, + "MassiveScenarioClassification (fa)": 52.5, + "MassiveScenarioClassification (fi)": 52.63, + "MassiveScenarioClassification (fr)": 54.32, + "MassiveScenarioClassification (he)": 52.41, + "MassiveScenarioClassification (hi)": 47.37, + "MassiveScenarioClassification (hu)": 53.43, + "MassiveScenarioClassification (hy)": 33.57, + "MassiveScenarioClassification (id)": 
54.38, + "MassiveScenarioClassification (is)": 49.78, + "MassiveScenarioClassification (it)": 54.84, + "MassiveScenarioClassification (ja)": 54.12, + "MassiveScenarioClassification (jv)": 32.71, + "MassiveScenarioClassification (ka)": 26.92, + "MassiveScenarioClassification (km)": 27.23, + "MassiveScenarioClassification (kn)": 10.06, + "MassiveScenarioClassification (ko)": 52.01, + "MassiveScenarioClassification (lv)": 44.82, + "MassiveScenarioClassification (ml)": 49.1, + "MassiveScenarioClassification (mn)": 21.51, + "MassiveScenarioClassification (ms)": 53.6, + "MassiveScenarioClassification (my)": 29.72, + "MassiveScenarioClassification (nb)": 43.9, + "MassiveScenarioClassification (nl)": 53.33, + "MassiveScenarioClassification (pl)": 52.92, + "MassiveScenarioClassification (pt)": 53.41, + "MassiveScenarioClassification (ro)": 50.48, + "MassiveScenarioClassification (ru)": 51.84, + "MassiveScenarioClassification (sl)": 51.29, + "MassiveScenarioClassification (sq)": 55.65, + "MassiveScenarioClassification (sv)": 54.64, + "MassiveScenarioClassification (sw)": 42.04, + "MassiveScenarioClassification (ta)": 36.72, + "MassiveScenarioClassification (te)": 42.08, + "MassiveScenarioClassification (th)": 52.15, + "MassiveScenarioClassification (tl)": 37.34, + "MassiveScenarioClassification (tr)": 52.56, + "MassiveScenarioClassification (ur)": 32.6, + "MassiveScenarioClassification (vi)": 50.97, + "MassiveScenarioClassification (zh-CN)": 50.22, + "MassiveScenarioClassification (zh-TW)": 42.32, + "ToxicConversationsClassification": 54.05, + "TweetSentimentExtractionClassification": 48.73 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LASER2", + "AlloProfClusteringP2P": 48.45, + "AlloProfClusteringS2S": 25.81, + "ArxivClusteringP2P": 17.77, + "ArxivClusteringS2S": 12.39, + "BiorxivClusteringP2P": 12.4, + "BiorxivClusteringS2S": 8.83, + "HALClusteringS2S": 11.52, + "MLSUMClusteringP2P": 34.53, + "MLSUMClusteringS2S": 27.35, + "MasakhaNEWSClusteringP2P (fra)": 32.04, + "MasakhaNEWSClusteringS2S (fra)": 29.77, + "MedrxivClusteringP2P": 17.91, + "MedrxivClusteringS2S": 16.63, + "RedditClustering": 9.96, + "RedditClusteringP2P": 26.42, + "StackExchangeClustering": 15.79, + "StackExchangeClusteringP2P": 18.63, + "TwentyNewsgroupsClustering": 11.38 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "LASER2", + "OpusparcusPC (fr)": 93.77, + "PawsXPairClassification (fr)": 69.53, + "SprintDuplicateQuestions": 65.54, + "TwitterSemEval2015": 59.57, + "TwitterURLCorpus": 81.47 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "LASER2", + "AlloprofReranking": 35.29, + "AskUbuntuDupQuestions": 48.99, + "MindSmallReranking": 24.79, + "SciDocsRR": 54.99, + "StackOverflowDupQuestions": 36.98, + "SyntecReranking": 55.93 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "LASER2", + "AlloprofRetrieval": 3.1, + "ArguAna": 12.86, + "BSARDRetrieval": 0.36, + "CQADupstackRetrieval": 4.12, + "ClimateFEVER": 0.36, + "DBPedia": 1.53, + "FEVER": 0.77, + "FiQA2018": 1.73, + "HotpotQA": 5.5, + "MSMARCO": 1.09, + "MintakaRetrieval (fr)": 6.31, + "NFCorpus": 2.44, + "NQ": 0.64, + "QuoraRetrieval": 71.14, + "SCIDOCS": 0.78, + "SciFact": 4.04, + "SyntecRetrieval": 28.58, + "TRECCOVID": 10.97, + "Touche2020": 1.06, + "XPQARetrieval (fr)": 42.59 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "LASER2", + "BIOSSES": 62.01, + "SICK-R": 62.86, + "SICKFr": 64.95, + "STS12": 62.6, + "STS13": 59.62, + "STS14": 57.03, + "STS15": 71.57, + "STS16": 70.75, + "STS17 (ar-ar)": 67.47, + 
"STS17 (en-ar)": 65.05, + "STS17 (en-de)": 66.66, + "STS17 (en-en)": 76.73, + "STS17 (en-tr)": 70.05, + "STS17 (es-en)": 55.3, + "STS17 (es-es)": 79.67, + "STS17 (fr-en)": 70.82, + "STS17 (it-en)": 70.98, + "STS17 (ko-ko)": 70.52, + "STS17 (nl-en)": 68.12, + "STS22 (ar)": 42.57, + "STS22 (de)": 25.69, + "STS22 (de-en)": 32.35, + "STS22 (de-fr)": 37.41, + "STS22 (de-pl)": 15.67, + "STS22 (en)": 39.76, + "STS22 (es)": 54.92, + "STS22 (es-en)": 54.34, + "STS22 (es-it)": 42.21, + "STS22 (fr)": 58.61, + "STS22 (fr-pl)": 39.44, + "STS22 (it)": 60.31, + "STS22 (pl)": 18.34, + "STS22 (pl-en)": 53.63, + "STS22 (ru)": 39.24, + "STS22 (tr)": 36.97, + "STS22 (zh)": 49.41, + "STS22 (zh-en)": 46.19, + "STSBenchmark": 69.77, + "STSBenchmarkMultilingualSTS (fr)": 69.82 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LASER2", + "SummEval": 26.8, + "SummEvalFr": 31.56 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "LASER2" + } + ] + } + }, + "voyage-code-2": { + "BitextMining": { + "f1": [ + { + "Model": "voyage-code-2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "voyage-code-2", + "AmazonReviewsClassification (fr)": 42.15, + "MTOPDomainClassification (fr)": 87.68, + "MTOPIntentClassification (fr)": 59.44, + "MasakhaNEWSClassification (fra)": 82.13, + "MassiveIntentClassification (fr)": 63.08, + "MassiveScenarioClassification (fr)": 70.15 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "voyage-code-2", + "AlloProfClusteringP2P": 61.63, + "AlloProfClusteringS2S": 50.67, + "HALClusteringS2S": 27.44, + "MLSUMClusteringP2P": 45.23, + "MLSUMClusteringS2S": 41.48, + "MasakhaNEWSClusteringP2P (fra)": 56.59, + "MasakhaNEWSClusteringS2S (fra)": 35.18 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "voyage-code-2", + "OpusparcusPC (fr)": 92.87, + "PawsXPairClassification (fr)": 60.83 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "voyage-code-2", + "AlloprofReranking": 70.79, + "SyntecReranking": 86.77 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "voyage-code-2", + "AlloprofRetrieval": 52.61, + "BSARDRetrieval": 0.29, + "MintakaRetrieval (fr)": 19.05, + "SyntecRetrieval": 82.77, + "XPQARetrieval (fr)": 71.95 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "voyage-code-2", + "SICKFr": 73.56, + "STS22 (fr)": 79.99, + "STSBenchmarkMultilingualSTS (fr)": 79.02 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "voyage-code-2", + "SummEvalFr": 28.34 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "voyage-code-2" + } + ] + } + }, + "sup-simcse-bert-base-uncased": { + "BitextMining": { + "f1": [ + { + "Model": "sup-simcse-bert-base-uncased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "sup-simcse-bert-base-uncased", + "AmazonCounterfactualClassification (en)": 75.75, + "AmazonPolarityClassification": 82.47, + "AmazonReviewsClassification (en)": 39.6, + "Banking77Classification": 75.76, + "EmotionClassification": 44.81, + "ImdbClassification": 73.53, + "MTOPDomainClassification (en)": 84.29, + "MTOPIntentClassification (en)": 63.14, + "MassiveIntentClassification (en)": 65.95, + "MassiveScenarioClassification (en)": 70.78, + "ToxicConversationsClassification": 72.04, + "TweetSentimentExtractionClassification": 59.73 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "sup-simcse-bert-base-uncased", + "ArxivClusteringP2P": 35.18, + "ArxivClusteringS2S": 27.54, + "BiorxivClusteringP2P": 30.15, + "BiorxivClusteringS2S": 24.67, + 
"MedrxivClusteringP2P": 26.25, + "MedrxivClusteringS2S": 24.12, + "RedditClustering": 40.23, + "RedditClusteringP2P": 47.74, + "StackExchangeClustering": 47.55, + "StackExchangeClusteringP2P": 29.45, + "TwentyNewsgroupsClustering": 34.86 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "sup-simcse-bert-base-uncased", + "SprintDuplicateQuestions": 69.39, + "TwitterSemEval2015": 67.75, + "TwitterURLCorpus": 83.89 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "sup-simcse-bert-base-uncased", + "AskUbuntuDupQuestions": 51.8, + "MindSmallReranking": 29.3, + "SciDocsRR": 70.14, + "StackOverflowDupQuestions": 38.9 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "sup-simcse-bert-base-uncased", + "ArguAna": 38.33, + "CQADupstackRetrieval": 14.5, + "ClimateFEVER": 11.98, + "DBPedia": 19.73, + "FEVER": 20.41, + "FiQA2018": 10.41, + "HotpotQA": 22.9, + "MSMARCO": 11.0, + "NFCorpus": 12.42, + "NQ": 16.08, + "QuoraRetrieval": 79.62, + "SCIDOCS": 7.53, + "SciFact": 29.59, + "TRECCOVID": 22.93, + "Touche2020": 9.9 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "sup-simcse-bert-base-uncased", + "BIOSSES": 68.38, + "SICK-R": 80.77, + "STS12": 75.3, + "STS13": 84.67, + "STS14": 80.19, + "STS15": 85.4, + "STS16": 80.82, + "STS17 (en-en)": 89.44, + "STS22 (en)": 61.96, + "STSBenchmark": 84.25 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "sup-simcse-bert-base-uncased", + "SummEval": 31.17 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "sup-simcse-bert-base-uncased" + } + ] + } + }, + "voyage-law-2": { + "BitextMining": { + "f1": [ + { + "Model": "voyage-law-2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "voyage-law-2", + "AmazonReviewsClassification (fr)": 41.98, + "MTOPDomainClassification (fr)": 90.12, + "MTOPIntentClassification (fr)": 62.44, + "MasakhaNEWSClassification (fra)": 76.42, + "MassiveIntentClassification (fr)": 66.94, + "MassiveScenarioClassification (fr)": 72.78 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "voyage-law-2", + "AlloProfClusteringP2P": 62.5, + "AlloProfClusteringS2S": 44.28, + "HALClusteringS2S": 26.36, + "MLSUMClusteringP2P (fr)": 44.03, + "MLSUMClusteringS2S (fr)": 42.95, + "MasakhaNEWSClusteringP2P (fra)": 50.68, + "MasakhaNEWSClusteringS2S (fra)": 38.79 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "voyage-law-2", + "OpusparcusPC (fr)": 93.06, + "PawsXPairClassification (fr)": 61.54 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "voyage-law-2", + "AlloprofReranking": 72.92, + "SyntecReranking": 91.2 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "voyage-law-2", + "AILACasedocs": 44.56, + "AILAStatutes": 45.51, + "AlloprofRetrieval": 57.28, + "BSARDRetrieval": 11.83, + "GerDaLIRSmall": 44.91, + "LEMBNarrativeQARetrieval": 55.78, + "LEMBNeedleRetrieval": 80.5, + "LEMBPasskeyRetrieval": 93.75, + "LEMBQMSumRetrieval": 57.26, + "LEMBSummScreenFDRetrieval": 98.72, + "LEMBWikimQARetrieval": 87.08, + "LeCaRDv2": 72.75, + "LegalBenchConsumerContractsQA": 83.27, + "LegalBenchCorporateLobbying": 95.66, + "LegalQuAD": 67.47, + "LegalSummarization": 68.96, + "MintakaRetrieval (fr)": 34.92, + "SyntecRetrieval": 87.33, + "XPQARetrieval (fr)": 73.56 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "voyage-law-2", + "SICKFr": 74.09, + "STS22 (fr)": 83.75, + "STSBenchmarkMultilingualSTS (fr)": 83.02 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "voyage-law-2", + "SummEvalFr": 30.34 + } + ] + }, + 
"InstructionRetrieval": { + "p-MRR": [ + { + "Model": "voyage-law-2" + } + ] + } + }, + "bert-base-uncased": { + "BitextMining": { + "f1": [ + { + "Model": "bert-base-uncased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bert-base-uncased", + "AmazonCounterfactualClassification (en)": 74.25, + "AmazonPolarityClassification": 71.33, + "AmazonReviewsClassification (en)": 33.56, + "Banking77Classification": 63.41, + "EmotionClassification": 35.28, + "ImdbClassification": 65.35, + "MTOPDomainClassification (en)": 82.63, + "MTOPIntentClassification (en)": 68.14, + "MassiveIntentClassification (en)": 59.88, + "MassiveScenarioClassification (en)": 64.28, + "ToxicConversationsClassification": 70.0, + "TweetSentimentExtractionClassification": 51.81 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bert-base-uncased", + "ArxivClusteringP2P": 35.19, + "ArxivClusteringS2S": 27.51, + "BiorxivClusteringP2P": 30.12, + "BiorxivClusteringS2S": 24.77, + "MedrxivClusteringP2P": 26.09, + "MedrxivClusteringS2S": 23.6, + "RedditClustering": 27.24, + "RedditClusteringP2P": 43.32, + "StackExchangeClustering": 43.58, + "StackExchangeClusteringP2P": 26.55, + "TwentyNewsgroupsClustering": 23.35 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bert-base-uncased", + "SprintDuplicateQuestions": 36.81, + "TwitterSemEval2015": 55.9, + "TwitterURLCorpus": 76.29 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bert-base-uncased", + "AskUbuntuDupQuestions": 45.84, + "MindSmallReranking": 28.37, + "SciDocsRR": 64.94, + "StackOverflowDupQuestions": 34.62 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bert-base-uncased", + "ArguAna": 28.29, + "CQADupstackRetrieval": 5.51, + "ClimateFEVER": 5.41, + "DBPedia": 4.13, + "FEVER": 3.3, + "FiQA2018": 2.19, + "HotpotQA": 8.26, + "MSMARCO": 1.91, + "NFCorpus": 4.3, + "NQ": 2.62, + "QuoraRetrieval": 61.03, + "SCIDOCS": 2.82, + "SciFact": 13.34, + "TRECCOVID": 14.74, + "Touche2020": 0.97 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bert-base-uncased", + "BIOSSES": 54.7, + "SICK-R": 58.65, + "STS12": 30.87, + "STS13": 59.89, + "STS14": 47.73, + "STS15": 60.29, + "STS16": 63.73, + "STS17 (en-en)": 64.1, + "STS22 (en)": 56.37, + "STSBenchmark": 47.29 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bert-base-uncased", + "SummEval": 29.82 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bert-base-uncased" + } + ] + } + }, + "udever-bloom-560m": { + "BitextMining": { + "f1": [ + { + "Model": "udever-bloom-560m" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "udever-bloom-560m", + "AmazonReviewsClassification (fr)": 26.85, + "MTOPDomainClassification (fr)": 34.99, + "MTOPIntentClassification (fr)": 15.76, + "MasakhaNEWSClassification (fra)": 67.94, + "MassiveIntentClassification (fr)": 15.09, + "MassiveScenarioClassification (fr)": 21.67 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "udever-bloom-560m", + "AlloProfClusteringP2P": 53.57, + "AlloProfClusteringS2S": 22.13, + "HALClusteringS2S": 7.68, + "MLSUMClusteringP2P": 36.43, + "MLSUMClusteringS2S": 25.26, + "MasakhaNEWSClusteringP2P (fra)": 37.57, + "MasakhaNEWSClusteringS2S (fra)": 20.58 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "udever-bloom-560m", + "OpusparcusPC (fr)": 82.1, + "PawsXPairClassification (fr)": 59.69 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "udever-bloom-560m", + "AlloprofReranking": 28.75, + "SyntecReranking": 50.88 + } + ] + 
}, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "udever-bloom-560m", + "AlloprofRetrieval": 1.98, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 0.48, + "SyntecRetrieval": 24.45, + "XPQARetrieval (fr)": 12.98 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "udever-bloom-560m", + "SICKFr": 54.54, + "STS22 (fr)": 61.35, + "STSBenchmarkMultilingualSTS (fr)": 36.78 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "udever-bloom-560m", + "SummEvalFr": 23.63 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "udever-bloom-560m" + } + ] + } + }, + "bge-base-zh-v1.5": { + "BitextMining": { + "f1": [ + { + "Model": "bge-base-zh-v1.5" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-base-zh-v1.5", + "AmazonReviewsClassification (zh)": 40.15, + "IFlyTek": 48.62, + "JDReview": 83.62, + "MassiveIntentClassification (zh-CN)": 67.93, + "MassiveScenarioClassification (zh-CN)": 73.98, + "MultilingualSentiment": 70.67, + "OnlineShopping": 91.26, + "TNews": 51.08, + "Waimai": 85.36 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bge-base-zh-v1.5", + "CLSClusteringP2P": 39.91, + "CLSClusteringS2S": 37.63, + "ThuNewsClusteringP2P": 58.45, + "ThuNewsClusteringS2S": 54.12 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bge-base-zh-v1.5", + "Cmnli": 84.1, + "Ocnli": 75.41 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bge-base-zh-v1.5", + "CMedQAv1": 80.47, + "CMedQAv2": 84.88, + "MMarcoReranking": 29.74, + "T2Reranking": 66.49 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bge-base-zh-v1.5", + "CmedqaRetrieval": 41.61, + "CovidRetrieval": 74.7, + "DuRetrieval": 85.07, + "EcomRetrieval": 64.25, + "MMarcoRetrieval": 77.69, + "MedicalRetrieval": 56.51, + "T2Retrieval": 83.71, + "VideoRetrieval": 72.35 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bge-base-zh-v1.5", + "AFQMC": 42.4, + "ATEC": 48.17, + "BQ": 61.78, + "LCQMC": 74.45, + "PAWSX": 20.4, + "QBQTC": 36.22, + "STS22 (zh)": 68.01, + "STSB": 78.31 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bge-base-zh-v1.5" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bge-base-zh-v1.5" + } + ] + } + }, + "text-search-ada-001": { + "BitextMining": { + "f1": [ + { + "Model": "text-search-ada-001" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text-search-ada-001" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "text-search-ada-001", + "BiorxivClusteringS2S": 26.05, + "MedrxivClusteringS2S": 25.67, + "TwentyNewsgroupsClustering": 44.92 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "text-search-ada-001" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text-search-ada-001" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text-search-ada-001", + "ArguAna": 46.91, + "ClimateFEVER": 18.5, + "DBPedia": 36.2, + "FEVER": 72.1, + "FiQA2018": 38.41, + "HotpotQA": 59.39, + "MSMARCO": 37.94, + "NFCorpus": 33.17, + "NQ": 42.81, + "QuoraRetrieval": 70.57, + "SCIDOCS": 14.83, + "SciFact": 67.25, + "TRECCOVID": 72.43, + "Touche2020": 28.68 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "text-search-ada-001" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-search-ada-001" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text-search-ada-001" + } + ] + } + }, + "text-similarity-davinci-001": { + "BitextMining": { + "f1": [ + { + "Model": "text-similarity-davinci-001" + } + ] + }, + 
"Classification": { + "accuracy": [ + { + "Model": "text-similarity-davinci-001" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "text-similarity-davinci-001", + "RedditClustering": 31.78, + "StackExchangeClustering": 36.86, + "TwentyNewsgroupsClustering": 29.33 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "text-similarity-davinci-001", + "SprintDuplicateQuestions": 69.52, + "TwitterSemEval2015": 74.42, + "TwitterURLCorpus": 83.75 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text-similarity-davinci-001", + "AskUbuntuDupQuestions": 53.56, + "SciDocsRR": 68.7, + "StackOverflowDupQuestions": 39.41 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text-similarity-davinci-001" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "text-similarity-davinci-001", + "BIOSSES": 68.95, + "SICK-R": 78.72, + "STSBenchmark": 84.08 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-similarity-davinci-001" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text-similarity-davinci-001" + } + ] + } + }, + "dragon-plus-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "dragon-plus-instruct", + "ARCChallenge": 8.24, + "AlphaNLI": 25.18, + "HellaSwag": 24.06, + "PIQA": 26.35, + "Quail": 4.2, + "RARbCode": 12.84, + "RARbMath": 36.15, + "SIQA": 1.75, + "SpartQA": 10.82, + "TempReasonL1": 1.54, + "TempReasonL2Fact": 16.11, + "TempReasonL2Pure": 0.57, + "TempReasonL3Fact": 14.81, + "TempReasonL3Pure": 7.46, + "WinoGrande": 60.84 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "dragon-plus-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "dragon-plus-instruct" + } + ] + } + }, + "text-embedding-ada-002-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text-embedding-ada-002-instruct", + "ARCChallenge": 11.85, + "AlphaNLI": 10.62, + "HellaSwag": 24.8, + "PIQA": 23.87, + "Quail": 5.79, + "RARbCode": 82.36, + "RARbMath": 67.26, + "SIQA": 2.64, + "SpartQA": 4.75, + "TempReasonL1": 1.44, + "TempReasonL2Fact": 19.38, + "TempReasonL2Pure": 2.43, + "TempReasonL3Fact": 17.58, + "TempReasonL3Pure": 7.31, + "WinoGrande": 11.36 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text-embedding-ada-002-instruct" + } + ] + } + }, + "rubert-tiny2": { + "BitextMining": { + "f1": [ + { + "Model": 
"rubert-tiny2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "rubert-tiny2", + "GeoreviewClassification (rus-Cyrl)": 39.64, + "HeadlineClassification (rus-Cyrl)": 74.19, + "InappropriatenessClassification (rus-Cyrl)": 58.57, + "KinopoiskClassification (rus-Cyrl)": 49.06, + "MassiveIntentClassification (rus-Cyrl)": 50.83, + "MassiveScenarioClassification (rus-Cyrl)": 59.15, + "RuReviewsClassification (rus-Cyrl)": 56.99, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 45.63, + "RuSciBenchOECDClassification (rus-Cyrl)": 35.48 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "rubert-tiny2", + "GeoreviewClusteringP2P (rus-Cyrl)": 44.18, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 41.41, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 38.09 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "rubert-tiny2", + "TERRa (rus-Cyrl)": 51.87 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "rubert-tiny2", + "RuBQReranking (rus-Cyrl)": 46.09 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "rubert-tiny2", + "RiaNewsRetrieval (rus-Cyrl)": 13.92, + "RuBQRetrieval (rus-Cyrl)": 10.87 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "rubert-tiny2", + "RUParaPhraserSTS (rus-Cyrl)": 65.14, + "RuSTSBenchmarkSTS (rus-Cyrl)": 69.43, + "STS22 (rus-Cyrl)": 50.23 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "rubert-tiny2" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "rubert-tiny2" + } + ] + } + }, + "all-mpnet-base-v2-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "all-mpnet-base-v2-instruct", + "ARCChallenge": 10.35, + "AlphaNLI": 1.96, + "HellaSwag": 13.01, + "PIQA": 27.18, + "Quail": 3.02, + "RARbCode": 48.95, + "RARbMath": 69.21, + "SIQA": 1.29, + "SpartQA": 1.01, + "TempReasonL1": 1.52, + "TempReasonL2Fact": 7.28, + "TempReasonL2Pure": 1.03, + "TempReasonL3Fact": 7.03, + "TempReasonL3Pure": 5.16, + "WinoGrande": 9.66 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "all-mpnet-base-v2-instruct" + } + ] + } + }, + "bert-base-10lang-cased": { + "BitextMining": { + "f1": [ + { + "Model": "bert-base-10lang-cased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bert-base-10lang-cased", + "AmazonReviewsClassification (fr)": 29.38, + "MTOPDomainClassification (fr)": 63.65, + "MTOPIntentClassification (fr)": 37.87, + "MasakhaNEWSClassification (fra)": 63.93, + "MassiveIntentClassification (fr)": 37.28, + "MassiveScenarioClassification (fr)": 44.5 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bert-base-10lang-cased", + "AlloProfClusteringP2P": 53.22, + "AlloProfClusteringS2S": 42.92, + "HALClusteringS2S": 19.94, + "MLSUMClusteringP2P": 40.96, + "MLSUMClusteringS2S": 31.87, + "MasakhaNEWSClusteringP2P (fra)": 24.23, + "MasakhaNEWSClusteringS2S (fra)": 24.46 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": 
"bert-base-10lang-cased", + "OpusparcusPC (fr)": 86.79, + "PawsXPairClassification (fr)": 53.4 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bert-base-10lang-cased", + "AlloprofReranking": 36.21, + "SyntecReranking": 53.25 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bert-base-10lang-cased", + "AlloprofRetrieval": 1.6, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 3.55, + "SyntecRetrieval": 18.95, + "XPQARetrieval (fr)": 18.39 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bert-base-10lang-cased", + "SICKFr": 58.76, + "STS22 (fr)": 40.31, + "STSBenchmarkMultilingualSTS (fr)": 52.25 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bert-base-10lang-cased", + "SummEvalFr": 29.06 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bert-base-10lang-cased" + } + ] + } + }, + "e5-mistral-7b-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "e5-mistral-7b-instruct", + "Tatoeba (rus-Cyrl_eng-Latn)": 93.75 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "e5-mistral-7b-instruct", + "AmazonReviewsClassification (fr)": 36.71, + "GeoreviewClassification (rus-Cyrl)": 50.25, + "HeadlineClassification (rus-Cyrl)": 85.68, + "InappropriatenessClassification (rus-Cyrl)": 67.19, + "KinopoiskClassification (rus-Cyrl)": 65.49, + "MTOPDomainClassification (fr)": 74.8, + "MTOPIntentClassification (fr)": 53.97, + "MasakhaNEWSClassification (fra)": 80.59, + "MassiveIntentClassification (rus-Cyrl)": 76.08, + "MassiveIntentClassification (fr)": 46.39, + "MassiveScenarioClassification (rus-Cyrl)": 79.61, + "MassiveScenarioClassification (fr)": 53.86, + "RuReviewsClassification (rus-Cyrl)": 67.68, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 64.59, + "RuSciBenchOECDClassification (rus-Cyrl)": 51.13 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "e5-mistral-7b-instruct", + "AlloProfClusteringP2P": 61.06, + "AlloProfClusteringS2S": 28.12, + "GeoreviewClusteringP2P (rus-Cyrl)": 65.68, + "HALClusteringS2S": 19.69, + "MLSUMClusteringP2P": 45.59, + "MLSUMClusteringS2S": 32.0, + "MasakhaNEWSClusteringP2P (fra)": 52.47, + "MasakhaNEWSClusteringS2S (fra)": 49.2, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 61.55, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 52.72 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "e5-mistral-7b-instruct", + "OpusparcusPC (rus-Cyrl)": 91.44, + "OpusparcusPC (fr)": 88.5, + "PawsXPairClassification (fr)": 63.65, + "TERRa (rus-Cyrl)": 59.38 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "e5-mistral-7b-instruct", + "AlloprofReranking": 47.36, + "RuBQReranking (rus-Cyrl)": 74.61, + "SyntecReranking": 77.05 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "e5-mistral-7b-instruct", + "AILACasedocs": 38.76, + "AILAStatutes": 38.07, + "ARCChallenge": 17.81, + "AlloprofRetrieval": 16.46, + "AlphaNLI": 26.12, + "BSARDRetrieval": 0.0, + "BrightRetrieval (sustainable_living)": 18.51, + "BrightRetrieval (economics)": 15.49, + "BrightRetrieval (theoremqa_theorems)": 23.78, + "BrightRetrieval (aops)": 7.1, + "BrightRetrieval (theoremqa_questions)": 23.94, + "BrightRetrieval (stackoverflow)": 9.83, + "BrightRetrieval (psychology)": 15.79, + "BrightRetrieval (pony)": 4.81, + "BrightRetrieval (leetcode)": 28.72, + "BrightRetrieval (biology)": 18.84, + "BrightRetrieval (earth_science)": 25.96, + "BrightRetrieval (robotics)": 16.37, + "GerDaLIRSmall": 37.18, + "HellaSwag": 34.85, + "LEMBNarrativeQARetrieval": 44.62, + "LEMBNeedleRetrieval": 48.25, + 
"LEMBPasskeyRetrieval": 71.0, + "LEMBQMSumRetrieval": 43.63, + "LEMBSummScreenFDRetrieval": 96.82, + "LEMBWikimQARetrieval": 82.11, + "LeCaRDv2": 68.56, + "LegalBenchConsumerContractsQA": 75.46, + "LegalBenchCorporateLobbying": 94.01, + "LegalQuAD": 59.64, + "LegalSummarization": 66.51, + "MintakaRetrieval (fr)": 3.57, + "PIQA": 39.37, + "Quail": 7.01, + "RARbCode": 78.46, + "RARbMath": 72.16, + "RiaNewsRetrieval (rus-Cyrl)": 81.94, + "RuBQRetrieval (rus-Cyrl)": 73.98, + "SIQA": 5.42, + "SpartQA": 9.92, + "SyntecRetrieval": 55.9, + "TempReasonL1": 3.31, + "TempReasonL2Fact": 36.9, + "TempReasonL2Pure": 9.18, + "TempReasonL3Fact": 30.18, + "TempReasonL3Pure": 14.31, + "WinoGrande": 41.21, + "XPQARetrieval (fr)": 41.29 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "e5-mistral-7b-instruct", + "RUParaPhraserSTS (rus-Cyrl)": 76.17, + "RuSTSBenchmarkSTS (rus-Cyrl)": 84.13, + "SICKFr": 64.39, + "STS22 (fr)": 69.82, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 84.25, + "STSBenchmarkMultilingualSTS (fr)": 61.87 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "e5-mistral-7b-instruct", + "SummEvalFr": 32.22 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "e5-mistral-7b-instruct", + "Core17InstructionRetrieval": 0.09, + "News21InstructionRetrieval": -0.86, + "Robust04InstructionRetrieval": -9.59 + } + ] + } + }, + "instructor-xl": { + "BitextMining": { + "f1": [ + { + "Model": "instructor-xl" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "instructor-xl" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "instructor-xl" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "instructor-xl" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "instructor-xl" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "instructor-xl", + "BrightRetrieval (aops)": 8.26, + "BrightRetrieval (robotics)": 17.39, + "BrightRetrieval (economics)": 22.81, + "BrightRetrieval (stackoverflow)": 19.06, + "BrightRetrieval (leetcode)": 27.5, + "BrightRetrieval (theoremqa_questions)": 14.59, + "BrightRetrieval (psychology)": 27.43, + "BrightRetrieval (biology)": 21.91, + "BrightRetrieval (theoremqa_theorems)": 6.5, + "BrightRetrieval (earth_science)": 34.35, + "BrightRetrieval (sustainable_living)": 18.82, + "BrightRetrieval (pony)": 5.02 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "instructor-xl" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "instructor-xl" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "instructor-xl", + "Core17InstructionRetrieval": 0.69, + "News21InstructionRetrieval": -0.9, + "Robust04InstructionRetrieval": -8.08 + } + ] + } + }, + "distiluse-base-multilingual-cased-v2": { + "BitextMining": { + "f1": [ + { + "Model": "distiluse-base-multilingual-cased-v2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "AllegroReviews": 28.03, + "AmazonCounterfactualClassification (de)": 68.14, + "AmazonCounterfactualClassification (en)": 71.81, + "AmazonCounterfactualClassification (en-ext)": 72.96, + "AmazonCounterfactualClassification (ja)": 65.39, + "AmazonPolarityClassification": 68.0, + "AmazonReviewsClassification (de)": 35.03, + "AmazonReviewsClassification (en)": 35.45, + "AmazonReviewsClassification (es)": 36.24, + "AmazonReviewsClassification (fr)": 35.7, + "AmazonReviewsClassification (ja)": 31.08, + "AmazonReviewsClassification (zh)": 33.89, + "Banking77Classification": 71.48, + "CBD": 60.0, + 
"EmotionClassification": 40.04, + "ImdbClassification": 61.52, + "MTOPDomainClassification (de)": 86.19, + "MTOPDomainClassification (en)": 91.59, + "MTOPDomainClassification (es)": 87.75, + "MTOPDomainClassification (fr)": 84.61, + "MTOPDomainClassification (hi)": 76.41, + "MTOPDomainClassification (th)": 73.62, + "MTOPIntentClassification (de)": 59.21, + "MTOPIntentClassification (en)": 66.4, + "MTOPIntentClassification (es)": 57.21, + "MTOPIntentClassification (fr)": 53.41, + "MTOPIntentClassification (hi)": 45.54, + "MTOPIntentClassification (th)": 47.73, + "MasakhaNEWSClassification (fra)": 76.87, + "MassiveIntentClassification (af)": 40.02, + "MassiveIntentClassification (am)": 2.35, + "MassiveIntentClassification (ar)": 43.14, + "MassiveIntentClassification (az)": 25.6, + "MassiveIntentClassification (bn)": 4.84, + "MassiveIntentClassification (cy)": 15.43, + "MassiveIntentClassification (da)": 52.33, + "MassiveIntentClassification (de)": 51.57, + "MassiveIntentClassification (el)": 49.65, + "MassiveIntentClassification (en)": 66.71, + "MassiveIntentClassification (es)": 56.57, + "MassiveIntentClassification (fa)": 55.36, + "MassiveIntentClassification (fi)": 45.72, + "MassiveIntentClassification (fr)": 57.02, + "MassiveIntentClassification (he)": 46.74, + "MassiveIntentClassification (hi)": 48.55, + "MassiveIntentClassification (hu)": 50.65, + "MassiveIntentClassification (hy)": 40.79, + "MassiveIntentClassification (id)": 56.0, + "MassiveIntentClassification (is)": 16.08, + "MassiveIntentClassification (it)": 57.65, + "MassiveIntentClassification (ja)": 55.33, + "MassiveIntentClassification (jv)": 28.16, + "MassiveIntentClassification (ka)": 29.41, + "MassiveIntentClassification (km)": 4.79, + "MassiveIntentClassification (kn)": 3.37, + "MassiveIntentClassification (ko)": 49.97, + "MassiveIntentClassification (lv)": 44.31, + "MassiveIntentClassification (ml)": 3.24, + "MassiveIntentClassification (mn)": 40.37, + "MassiveIntentClassification (ms)": 47.97, + "MassiveIntentClassification (my)": 38.48, + "MassiveIntentClassification (nb)": 46.01, + "MassiveIntentClassification (nl)": 58.29, + "MassiveIntentClassification (pl)": 53.1, + "MassiveIntentClassification (pt)": 58.63, + "MassiveIntentClassification (ro)": 50.63, + "MassiveIntentClassification (ru)": 57.96, + "MassiveIntentClassification (sl)": 50.66, + "MassiveIntentClassification (sq)": 50.25, + "MassiveIntentClassification (sv)": 52.41, + "MassiveIntentClassification (sw)": 19.29, + "MassiveIntentClassification (ta)": 3.79, + "MassiveIntentClassification (te)": 3.36, + "MassiveIntentClassification (th)": 45.28, + "MassiveIntentClassification (tl)": 28.44, + "MassiveIntentClassification (tr)": 50.47, + "MassiveIntentClassification (ur)": 46.03, + "MassiveIntentClassification (vi)": 45.25, + "MassiveIntentClassification (zh-CN)": 59.22, + "MassiveIntentClassification (zh-TW)": 54.96, + "MassiveScenarioClassification (af)": 53.67, + "MassiveScenarioClassification (am)": 7.72, + "MassiveScenarioClassification (ar)": 52.19, + "MassiveScenarioClassification (az)": 34.75, + "MassiveScenarioClassification (bn)": 10.65, + "MassiveScenarioClassification (cy)": 21.24, + "MassiveScenarioClassification (da)": 62.55, + "MassiveScenarioClassification (de)": 61.4, + "MassiveScenarioClassification (el)": 60.68, + "MassiveScenarioClassification (en)": 74.0, + "MassiveScenarioClassification (es)": 64.61, + "MassiveScenarioClassification (fa)": 59.24, + "MassiveScenarioClassification (fi)": 54.66, + "MassiveScenarioClassification (fr)": 65.2, 
+ "MassiveScenarioClassification (he)": 54.74, + "MassiveScenarioClassification (hi)": 55.99, + "MassiveScenarioClassification (hu)": 61.2, + "MassiveScenarioClassification (hy)": 49.63, + "MassiveScenarioClassification (id)": 65.25, + "MassiveScenarioClassification (is)": 22.6, + "MassiveScenarioClassification (it)": 64.63, + "MassiveScenarioClassification (ja)": 62.32, + "MassiveScenarioClassification (jv)": 35.77, + "MassiveScenarioClassification (ka)": 39.08, + "MassiveScenarioClassification (km)": 9.24, + "MassiveScenarioClassification (kn)": 8.28, + "MassiveScenarioClassification (ko)": 57.6, + "MassiveScenarioClassification (lv)": 51.72, + "MassiveScenarioClassification (ml)": 8.25, + "MassiveScenarioClassification (mn)": 47.21, + "MassiveScenarioClassification (ms)": 55.65, + "MassiveScenarioClassification (my)": 43.31, + "MassiveScenarioClassification (nb)": 54.98, + "MassiveScenarioClassification (nl)": 67.49, + "MassiveScenarioClassification (pl)": 61.29, + "MassiveScenarioClassification (pt)": 64.26, + "MassiveScenarioClassification (ro)": 58.03, + "MassiveScenarioClassification (ru)": 65.41, + "MassiveScenarioClassification (sl)": 59.36, + "MassiveScenarioClassification (sq)": 62.69, + "MassiveScenarioClassification (sv)": 64.35, + "MassiveScenarioClassification (sw)": 25.12, + "MassiveScenarioClassification (ta)": 8.67, + "MassiveScenarioClassification (te)": 7.82, + "MassiveScenarioClassification (th)": 54.65, + "MassiveScenarioClassification (tl)": 36.09, + "MassiveScenarioClassification (tr)": 60.89, + "MassiveScenarioClassification (ur)": 54.71, + "MassiveScenarioClassification (vi)": 55.15, + "MassiveScenarioClassification (zh-CN)": 66.44, + "MassiveScenarioClassification (zh-TW)": 62.89, + "PAC": 68.17, + "PolEmo2.0-IN": 48.84, + "PolEmo2.0-OUT": 30.0, + "ToxicConversationsClassification": 69.09, + "TweetSentimentExtractionClassification": 59.97 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "8TagsClustering": 12.51, + "AlloProfClusteringP2P": 55.95, + "AlloProfClusteringS2S": 35.39, + "ArxivClusteringP2P": 33.59, + "HALClusteringS2S": 18.2, + "MLSUMClusteringP2P": 40.17, + "MLSUMClusteringS2S": 34.65, + "MasakhaNEWSClusteringP2P (fra)": 53.76, + "MasakhaNEWSClusteringS2S (fra)": 32.76 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "CDSC-E": 71.83, + "OpusparcusPC (fr)": 92.07, + "PPC": 86.83, + "PSC": 96.35, + "PawsXPairClassification (fr)": 51.08, + "SICK-E-PL": 62.05, + "SprintDuplicateQuestions": 87.15, + "TwitterSemEval2015": 61.67, + "TwitterURLCorpus": 84.02 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "AlloprofReranking": 51.77, + "AskUbuntuDupQuestions": 53.75, + "MindSmallReranking": 30.39, + "SciDocsRR": 69.22, + "StackOverflowDupQuestions": 41.92, + "SyntecReranking": 74.78 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "AlloprofRetrieval": 26.99, + "ArguAna-PL": 36.7, + "BSARDRetrieval": 0.0, + "DBPedia-PL": 12.36, + "FiQA-PL": 8.02, + "HotpotQA-PL": 20.83, + "MSMARCO-PL": 4.57, + "MintakaRetrieval (fr)": 22.55, + "NFCorpus-PL": 16.28, + "NQ-PL": 5.85, + "Quora-PL": 71.95, + "SCIDOCS-PL": 6.5, + "SciFact-PL": 33.03, + "SyntecRetrieval": 65.34, + "TRECCOVID-PL": 16.91, + "XPQARetrieval (fr)": 51.2 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "BIOSSES": 78.34, + "CDSC-R": 87.67, + 
"SICK-R": 75.25, + "SICK-R-PL": 65.53, + "SICKFr": 72.49, + "STS12": 72.96, + "STS13": 70.58, + "STS14": 70.29, + "STS15": 81.94, + "STS16": 76.8, + "STS17 (ar-ar)": 77.34, + "STS17 (en-ar)": 77.46, + "STS17 (en-de)": 80.24, + "STS17 (en-en)": 86.19, + "STS17 (en-tr)": 74.34, + "STS17 (es-en)": 77.4, + "STS17 (es-es)": 83.71, + "STS17 (fr-en)": 79.28, + "STS17 (it-en)": 80.82, + "STS17 (ko-ko)": 76.4, + "STS17 (nl-en)": 80.51, + "STS22 (ar)": 49.04, + "STS22 (de)": 35.73, + "STS22 (de-en)": 47.51, + "STS22 (de-fr)": 60.76, + "STS22 (de-pl)": 36.09, + "STS22 (en)": 62.88, + "STS22 (es)": 59.34, + "STS22 (es-en)": 68.96, + "STS22 (es-it)": 63.28, + "STS22 (fr)": 76.41, + "STS22 (fr-pl)": 61.98, + "STS22 (it)": 65.1, + "STS22 (pl)": 34.58, + "STS22 (pl-en)": 71.33, + "STS22 (ru)": 52.4, + "STS22 (tr)": 54.07, + "STS22 (zh)": 54.32, + "STS22 (zh-en)": 61.75, + "STSBenchmark": 80.75, + "STSBenchmarkMultilingualSTS (fr)": 77.49 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "distiluse-base-multilingual-cased-v2", + "SummEvalFr": 28.12 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "distiluse-base-multilingual-cased-v2" + } + ] + } + }, + "Cohere-embed-multilingual-v3.0": { + "BitextMining": { + "f1": [ + { + "Model": "Cohere-embed-multilingual-v3.0" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "AmazonReviewsClassification (fr)": 41.89, + "MTOPDomainClassification (fr)": 86.23, + "MTOPIntentClassification (fr)": 61.07, + "MasakhaNEWSClassification (fra)": 83.06, + "MassiveIntentClassification (fr)": 62.94, + "MassiveScenarioClassification (fr)": 67.29 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "AlloProfClusteringP2P": 63.53, + "AlloProfClusteringS2S": 36.18, + "HALClusteringS2S": 19.9, + "MLSUMClusteringP2P": 45.08, + "MLSUMClusteringS2S": 34.75, + "MasakhaNEWSClusteringP2P (fra)": 53.18, + "MasakhaNEWSClusteringS2S (fra)": 32.31 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "OpusparcusPC (fr)": 94.08, + "PawsXPairClassification (fr)": 61.26 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "AlloprofReranking": 51.01, + "SyntecReranking": 85.72 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "AlloprofRetrieval": 38.36, + "BSARDRetrieval": 0.14, + "MintakaRetrieval (fr)": 25.44, + "SyntecRetrieval": 79.27, + "XPQARetrieval (fr)": 58.87 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "SICKFr": 79.23, + "STS22 (fr)": 82.76, + "STSBenchmarkMultilingualSTS (fr)": 81.84 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "Cohere-embed-multilingual-v3.0", + "SummEvalFr": 31.26 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "Cohere-embed-multilingual-v3.0" + } + ] + } + }, + "gbert-base": { + "BitextMining": { + "f1": [ + { + "Model": "gbert-base" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gbert-base" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gbert-base", + "BlurbsClusteringP2P": 35.36, + "BlurbsClusteringS2S": 11.27, + "TenKGnadClusteringP2P": 37.16, + "TenKGnadClusteringS2S": 24.23 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gbert-base" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gbert-base" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": 
"gbert-base" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gbert-base" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gbert-base" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gbert-base" + } + ] + } + }, + "nb-bert-base": { + "BitextMining": { + "f1": [ + { + "Model": "nb-bert-base", + "BornholmBitextMining": 9.88 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "nb-bert-base", + "AngryTweetsClassification": 52.14, + "DKHateClassification": 61.73, + "DanishPoliticalCommentsClassification": 34.84, + "LccSentimentClassification": 51.4, + "MassiveIntentClassification (da)": 56.69, + "MassiveIntentClassification (nb)": 60.67, + "MassiveIntentClassification (sv)": 53.89, + "MassiveScenarioClassification (da)": 61.93, + "MassiveScenarioClassification (nb)": 67.31, + "MassiveScenarioClassification (sv)": 55.37, + "NoRecClassification": 51.32, + "NordicLangClassification": 84.69, + "NorwegianParliament": 57.41, + "ScalaDaClassification": 57.99, + "ScalaNbClassification": 62.25 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "nb-bert-base" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "nb-bert-base" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "nb-bert-base" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "nb-bert-base" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "nb-bert-base" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "nb-bert-base" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "nb-bert-base" + } + ] + } + }, + "e5-small": { + "BitextMining": { + "f1": [ + { + "Model": "e5-small", + "BornholmBitextMining": 40.27 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "e5-small", + "AngryTweetsClassification": 43.6, + "DKHateClassification": 57.57, + "DanishPoliticalCommentsClassification": 28.37, + "LccSentimentClassification": 40.27, + "MassiveIntentClassification (da)": 41.89, + "MassiveIntentClassification (nb)": 40.25, + "MassiveIntentClassification (sv)": 40.07, + "MassiveScenarioClassification (da)": 49.93, + "MassiveScenarioClassification (nb)": 48.58, + "MassiveScenarioClassification (sv)": 47.06, + "NoRecClassification": 41.84, + "NordicLangClassification": 53.47, + "NorwegianParliament": 56.57, + "ScalaDaClassification": 50.15, + "ScalaNbClassification": 50.03 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "e5-small", + "BiorxivClusteringP2P": 36.1, + "BiorxivClusteringS2S": 31.51, + "MedrxivClusteringP2P": 31.31, + "MedrxivClusteringS2S": 28.32, + "RedditClustering": 43.27, + "RedditClusteringP2P": 57.22, + "StackExchangeClustering": 59.6, + "StackExchangeClusteringP2P": 30.82, + "TwentyNewsgroupsClustering": 37.65 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "e5-small" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "e5-small" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "e5-small" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "e5-small" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "e5-small" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "e5-small" + } + ] + } + }, + "tart-full-flan-t5-xl": { + "BitextMining": { + "f1": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "PairClassification": { + "ap": [ + 
{ + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "tart-full-flan-t5-xl" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "tart-full-flan-t5-xl", + "Core17InstructionRetrieval": 2.82, + "News21InstructionRetrieval": 1.99, + "Robust04InstructionRetrieval": -0.72 + } + ] + } + }, + "cross-en-de-roberta-sentence-transformer": { + "BitextMining": { + "f1": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "cross-en-de-roberta-sentence-transformer", + "BlurbsClusteringP2P": 30.82, + "BlurbsClusteringS2S": 12.69, + "TenKGnadClusteringP2P": 23.5, + "TenKGnadClusteringS2S": 10.94 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "cross-en-de-roberta-sentence-transformer" + } + ] + } + }, + "gelectra-large": { + "BitextMining": { + "f1": [ + { + "Model": "gelectra-large" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gelectra-large" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gelectra-large", + "BlurbsClusteringP2P": 13.96, + "BlurbsClusteringS2S": 7.57, + "TenKGnadClusteringP2P": 11.49, + "TenKGnadClusteringS2S": 3.91 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gelectra-large" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gelectra-large" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "gelectra-large" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gelectra-large" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gelectra-large" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gelectra-large" + } + ] + } + }, + "text-embedding-3-large-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text-embedding-3-large-instruct", + "ARCChallenge": 21.22, + "AlphaNLI": 34.23, + "HellaSwag": 31.4, + "PIQA": 37.52, + "Quail": 13.6, + "RARbCode": 89.41, + "RARbMath": 87.73, + "SIQA": 4.99, + "SpartQA": 7.45, + "TempReasonL1": 2.07, + "TempReasonL2Fact": 39.77, + "TempReasonL2Pure": 11.04, + "TempReasonL3Fact": 37.04, + "TempReasonL3Pure": 15.51, + "WinoGrande": 33.92 + } + ] + }, + "STS": { 
+ "spearman": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text-embedding-3-large-instruct" + } + ] + } + }, + "Cohere-embed-english-v3.0": { + "BitextMining": { + "f1": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "Cohere-embed-english-v3.0", + "AILACasedocs": 31.54, + "AILAStatutes": 27.15, + "ARCChallenge": 9.89, + "AlphaNLI": 15.1, + "BrightRetrieval (psychology)": 21.82, + "BrightRetrieval (economics)": 20.18, + "BrightRetrieval (robotics)": 16.21, + "BrightRetrieval (biology)": 18.98, + "BrightRetrieval (stackoverflow)": 16.47, + "BrightRetrieval (theoremqa_theorems)": 6.04, + "BrightRetrieval (pony)": 1.77, + "BrightRetrieval (sustainable_living)": 17.69, + "BrightRetrieval (aops)": 6.46, + "BrightRetrieval (theoremqa_questions)": 15.07, + "BrightRetrieval (leetcode)": 26.78, + "BrightRetrieval (earth_science)": 27.45, + "GerDaLIRSmall": 6.05, + "HellaSwag": 26.35, + "LeCaRDv2": 21.02, + "LegalBenchConsumerContractsQA": 77.12, + "LegalBenchCorporateLobbying": 93.68, + "LegalQuAD": 26.08, + "LegalSummarization": 61.7, + "PIQA": 28.49, + "Quail": 4.1, + "RARbCode": 57.19, + "RARbMath": 72.26, + "SIQA": 4.26, + "SpartQA": 3.75, + "TempReasonL1": 1.5, + "TempReasonL2Fact": 35.91, + "TempReasonL2Pure": 1.89, + "TempReasonL3Fact": 27.51, + "TempReasonL3Pure": 8.53, + "WinoGrande": 58.01 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "Cohere-embed-english-v3.0" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "Cohere-embed-english-v3.0", + "Core17InstructionRetrieval": 2.8, + "News21InstructionRetrieval": 0.2, + "Robust04InstructionRetrieval": -3.63 + } + ] + } + }, + "text2vec-base-multilingual": { + "BitextMining": { + "f1": [ + { + "Model": "text2vec-base-multilingual" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text2vec-base-multilingual", + "AmazonReviewsClassification (fr)": 34.25, + "MTOPDomainClassification (fr)": 71.83, + "MTOPIntentClassification (fr)": 44.53, + "MasakhaNEWSClassification (fra)": 73.84, + "MassiveIntentClassification (fr)": 51.93, + "MassiveScenarioClassification (fr)": 58.31 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "text2vec-base-multilingual", + "AlloProfClusteringP2P": 49.11, + "AlloProfClusteringS2S": 32.72, + "HALClusteringS2S": 16.19, + "MLSUMClusteringP2P": 36.19, + "MLSUMClusteringS2S": 30.39, + "MasakhaNEWSClusteringP2P (fra)": 38.51, + "MasakhaNEWSClusteringS2S (fra)": 32.51 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "text2vec-base-multilingual", + "OpusparcusPC (fr)": 92.04, + "PawsXPairClassification (fr)": 65.57 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text2vec-base-multilingual", + "AlloprofReranking": 51.48, + "SyntecReranking": 70.28 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text2vec-base-multilingual", + "AlloprofRetrieval": 18.9, + 
"BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 14.81, + "SyntecRetrieval": 49.69, + "XPQARetrieval (fr)": 40.4 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "text2vec-base-multilingual", + "SICKFr": 77.25, + "STS22 (fr)": 74.1, + "STSBenchmarkMultilingualSTS (fr)": 83.48 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text2vec-base-multilingual", + "SummEvalFr": 29.33 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text2vec-base-multilingual" + } + ] + } + }, + "msmarco-bert-co-condensor": { + "BitextMining": { + "f1": [ + { + "Model": "msmarco-bert-co-condensor" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "msmarco-bert-co-condensor", + "AmazonCounterfactualClassification (en)": 64.06, + "AmazonPolarityClassification": 66.88, + "AmazonReviewsClassification (en)": 34.85, + "Banking77Classification": 82.35, + "EmotionClassification": 41.91, + "ImdbClassification": 60.17, + "MTOPDomainClassification (en)": 91.34, + "MTOPIntentClassification (en)": 71.07, + "MassiveIntentClassification (en)": 70.4, + "MassiveScenarioClassification (en)": 73.73, + "ToxicConversationsClassification": 64.01, + "TweetSentimentExtractionClassification": 55.74 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "msmarco-bert-co-condensor", + "ArxivClusteringP2P": 36.94, + "ArxivClusteringS2S": 29.03, + "BiorxivClusteringP2P": 32.35, + "BiorxivClusteringS2S": 28.16, + "MedrxivClusteringP2P": 30.23, + "MedrxivClusteringS2S": 27.01, + "RedditClustering": 48.04, + "RedditClusteringP2P": 53.53, + "StackExchangeClustering": 59.54, + "StackExchangeClusteringP2P": 30.48, + "TwentyNewsgroupsClustering": 38.68 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "msmarco-bert-co-condensor", + "SprintDuplicateQuestions": 96.09, + "TwitterSemEval2015": 65.95, + "TwitterURLCorpus": 83.17 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "msmarco-bert-co-condensor", + "AskUbuntuDupQuestions": 58.99, + "MindSmallReranking": 27.13, + "SciDocsRR": 72.78, + "StackOverflowDupQuestions": 48.48 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "msmarco-bert-co-condensor", + "ArguAna": 45.15, + "CQADupstackRetrieval": 27.72, + "ClimateFEVER": 16.96, + "DBPedia": 27.86, + "FEVER": 45.68, + "FiQA2018": 15.62, + "HotpotQA": 35.61, + "MSMARCO": 29.57, + "NFCorpus": 22.29, + "NQ": 29.85, + "QuoraRetrieval": 86.51, + "SCIDOCS": 10.13, + "SciFact": 52.31, + "TRECCOVID": 40.54, + "Touche2020": 8.57 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "msmarco-bert-co-condensor", + "BIOSSES": 77.32, + "SICK-R": 72.0, + "STS12": 68.19, + "STS13": 80.4, + "STS14": 74.02, + "STS15": 82.57, + "STS16": 79.78, + "STS17 (en-en)": 85.94, + "STS22 (en)": 67.54, + "STSBenchmark": 76.97 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "msmarco-bert-co-condensor", + "SummEval": 29.5 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "msmarco-bert-co-condensor" + } + ] + } + }, + "electra-small-swedish-cased-discriminator": { + "BitextMining": { + "f1": [ + { + "Model": "electra-small-swedish-cased-discriminator", + "BornholmBitextMining": 0.85 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "electra-small-swedish-cased-discriminator", + "AngryTweetsClassification": 40.52, + "DKHateClassification": 52.28, + "DanishPoliticalCommentsClassification": 25.17, + "LccSentimentClassification": 36.67, + "MassiveIntentClassification (da)": 6.51, + "MassiveIntentClassification (nb)": 5.66, + 
"MassiveIntentClassification (sv)": 6.6, + "MassiveScenarioClassification (da)": 11.5, + "MassiveScenarioClassification (nb)": 11.26, + "MassiveScenarioClassification (sv)": 12.16, + "NoRecClassification": 39.72, + "NordicLangClassification": 44.53, + "NorwegianParliament": 52.44, + "ScalaDaClassification": 51.66, + "ScalaNbClassification": 52.41 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "electra-small-swedish-cased-discriminator" + } + ] + } + }, + "bm25s": { + "BitextMining": { + "f1": [ + { + "Model": "bm25s" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bm25s" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bm25s" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bm25s" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bm25s" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bm25s", + "ArguAna": 49.28, + "CQADupstackRetrieval": 31.86, + "ClimateFEVER": 13.62, + "DBPedia": 29.91, + "FEVER": 48.09, + "FiQA2018": 25.14, + "HotpotQA": 56.91, + "MSMARCO": 21.89, + "NFCorpus": 32.08, + "NQ": 28.5, + "QuoraRetrieval": 80.42, + "SCIDOCS": 15.78, + "SciFact": 68.7, + "TRECCOVID": 62.31, + "Touche2020": 33.05 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bm25s" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bm25s" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bm25s" + } + ] + } + }, + "LLM2Vec-Sheared-Llama-unsupervised": { + "BitextMining": { + "f1": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised", + "AmazonCounterfactualClassification (en)": 72.93, + "AmazonPolarityClassification": 74.28, + "AmazonReviewsClassification (en)": 36.14, + "Banking77Classification": 79.0, + "EmotionClassification": 42.85, + "ImdbClassification": 71.92, + "MTOPDomainClassification (en)": 91.24, + "MTOPIntentClassification (en)": 74.08, + "MassiveIntentClassification (en)": 69.99, + "MassiveScenarioClassification (en)": 75.15, + "ToxicConversationsClassification": 68.4, + "TweetSentimentExtractionClassification": 56.08 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised", + "ArxivClusteringP2P": 42.92, + "ArxivClusteringS2S": 35.2, + "BiorxivClusteringP2P": 35.02, + "BiorxivClusteringS2S": 27.21, + "MedrxivClusteringP2P": 30.15, + "MedrxivClusteringS2S": 26.96, + "RedditClustering": 38.67, + "RedditClusteringP2P": 53.42, + "StackExchangeClustering": 59.35, + "StackExchangeClusteringP2P": 31.47, + "TwentyNewsgroupsClustering": 31.54 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised", + "SprintDuplicateQuestions": 77.36, + "TwitterSemEval2015": 61.54, + "TwitterURLCorpus": 77.73 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": 
"LLM2Vec-Sheared-Llama-unsupervised", + "AskUbuntuDupQuestions": 52.7, + "MindSmallReranking": 29.52, + "SciDocsRR": 67.76, + "StackOverflowDupQuestions": 40.82 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised", + "ArguAna": 43.64, + "CQADupstackRetrieval": 18.5, + "ClimateFEVER": 18.95, + "DBPedia": 13.21, + "FEVER": 16.96, + "FiQA2018": 16.99, + "HotpotQA": 22.64, + "MSMARCO": 7.03, + "NFCorpus": 15.73, + "NQ": 17.96, + "QuoraRetrieval": 78.23, + "SCIDOCS": 5.53, + "SciFact": 38.31, + "TRECCOVID": 56.04, + "Touche2020": 19.17 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised", + "BIOSSES": 75.12, + "SICK-R": 69.34, + "STS12": 60.09, + "STS13": 72.52, + "STS14": 66.7, + "STS15": 77.69, + "STS16": 75.94, + "STS17 (en-en)": 81.67, + "STS22 (en)": 63.7, + "STSBenchmark": 73.36 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised", + "SummEval": 31.23 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "LLM2Vec-Sheared-Llama-unsupervised" + } + ] + } + }, + "voyage-large-2-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "voyage-large-2-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "voyage-large-2-instruct", + "AmazonCounterfactualClassification (en)": 77.6, + "AmazonPolarityClassification": 96.58, + "AmazonReviewsClassification (en)": 50.77, + "Banking77Classification": 86.96, + "EmotionClassification": 59.81, + "ImdbClassification": 96.13, + "MTOPDomainClassification (en)": 98.86, + "MTOPIntentClassification (en)": 86.97, + "MassiveIntentClassification (en)": 81.08, + "MassiveScenarioClassification (en)": 87.95, + "ToxicConversationsClassification": 83.58, + "TweetSentimentExtractionClassification": 71.55 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "voyage-large-2-instruct", + "ArxivClusteringP2P": 51.81, + "ArxivClusteringS2S": 44.73, + "BiorxivClusteringP2P": 46.07, + "BiorxivClusteringS2S": 40.64, + "MedrxivClusteringP2P": 42.94, + "MedrxivClusteringS2S": 41.44, + "RedditClustering": 68.5, + "RedditClusteringP2P": 64.86, + "StackExchangeClustering": 74.16, + "StackExchangeClusteringP2P": 45.1, + "TwentyNewsgroupsClustering": 66.62 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "voyage-large-2-instruct", + "SprintDuplicateQuestions": 94.5, + "TwitterSemEval2015": 86.32, + "TwitterURLCorpus": 86.9 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "voyage-large-2-instruct", + "AskUbuntuDupQuestions": 64.92, + "MindSmallReranking": 30.97, + "SciDocsRR": 89.34, + "StackOverflowDupQuestions": 55.11 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "voyage-large-2-instruct", + "ArguAna": 64.06, + "BrightRetrieval (theoremqa_questions)": 26.06, + "BrightRetrieval (earth_science)": 25.09, + "BrightRetrieval (leetcode)": 30.6, + "BrightRetrieval (economics)": 19.85, + "BrightRetrieval (robotics)": 11.21, + "BrightRetrieval (psychology)": 24.79, + "BrightRetrieval (aops)": 7.45, + "BrightRetrieval (sustainable_living)": 15.58, + "BrightRetrieval (pony)": 1.48, + "BrightRetrieval (theoremqa_theorems)": 10.13, + "BrightRetrieval (biology)": 23.55, + "BrightRetrieval (stackoverflow)": 15.03, + "CQADupstackRetrieval": 46.6, + "ClimateFEVER": 32.65, + "DBPedia": 46.03, + "FEVER": 91.47, + "FiQA2018": 59.76, + "HotpotQA": 70.86, + "MSMARCO": 40.6, + "NFCorpus": 40.32, + "NQ": 65.92, + "QuoraRetrieval": 87.4, + "SCIDOCS": 24.32, + "SciFact": 79.99, + 
"TRECCOVID": 85.07, + "Touche2020": 39.16 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "voyage-large-2-instruct", + "BIOSSES": 89.24, + "SICK-R": 83.16, + "STS12": 73.34, + "STS13": 88.49, + "STS14": 86.49, + "STS15": 91.13, + "STS16": 85.68, + "STS17 (en-en)": 90.06, + "STS22 (en)": 66.32, + "STSBenchmark": 89.22 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "voyage-large-2-instruct", + "SummEval": 30.84 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "voyage-large-2-instruct" + } + ] + } + }, + "contriever-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "contriever-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "contriever-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "contriever-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "contriever-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "contriever-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "contriever-instruct", + "ARCChallenge": 7.63, + "AlphaNLI": 27.09, + "PIQA": 21.73, + "Quail": 4.92, + "RARbCode": 7.12, + "RARbMath": 21.83, + "SIQA": 0.88, + "SpartQA": 10.56, + "TempReasonL1": 1.8, + "TempReasonL2Fact": 22.03, + "TempReasonL2Pure": 0.94, + "TempReasonL3Fact": 20.82, + "TempReasonL3Pure": 7.15, + "WinoGrande": 26.3 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "contriever-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "contriever-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "contriever-instruct" + } + ] + } + }, + "dragon-plus": { + "BitextMining": { + "f1": [ + { + "Model": "dragon-plus" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "dragon-plus" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "dragon-plus" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "dragon-plus" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "dragon-plus" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "dragon-plus", + "ARCChallenge": 8.91, + "AlphaNLI": 32.1, + "HellaSwag": 27.69, + "PIQA": 28.01, + "Quail": 4.09, + "RARbCode": 17.58, + "RARbMath": 45.09, + "SIQA": 2.0, + "SpartQA": 10.34, + "TempReasonL1": 1.82, + "TempReasonL2Fact": 17.45, + "TempReasonL2Pure": 0.55, + "TempReasonL3Fact": 15.71, + "TempReasonL3Pure": 7.97, + "WinoGrande": 67.18 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "dragon-plus" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "dragon-plus" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "dragon-plus" + } + ] + } + }, + "xlm-roberta-large": { + "BitextMining": { + "f1": [ + { + "Model": "xlm-roberta-large" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "xlm-roberta-large", + "AmazonReviewsClassification (fr)": 26.62, + "MTOPDomainClassification (fr)": 36.77, + "MTOPIntentClassification (fr)": 15.37, + "MasakhaNEWSClassification (fra)": 65.76, + "MassiveIntentClassification (fr)": 15.82, + "MassiveScenarioClassification (fr)": 23.92 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "xlm-roberta-large", + "AlloProfClusteringP2P": 56.54, + "AlloProfClusteringS2S": 21.18, + "BlurbsClusteringP2P": 29.84, + "BlurbsClusteringS2S": 7.29, + "HALClusteringS2S": 5.94, + "MLSUMClusteringP2P": 42.67, + "MLSUMClusteringS2S": 18.5, + "MasakhaNEWSClusteringP2P (fra)": 34.02, + "MasakhaNEWSClusteringS2S (fra)": 21.52, + 
"TenKGnadClusteringP2P": 32.46, + "TenKGnadClusteringS2S": 6.16 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "xlm-roberta-large", + "OpusparcusPC (fr)": 83.73, + "PawsXPairClassification (fr)": 53.38 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "xlm-roberta-large", + "AlloprofReranking": 28.62, + "SyntecReranking": 49.4 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "xlm-roberta-large", + "AlloprofRetrieval": 0.52, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 0.9, + "SyntecRetrieval": 6.6, + "XPQARetrieval (fr)": 12.7 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "xlm-roberta-large", + "SICKFr": 50.01, + "STS22 (fr)": 55.49, + "STSBenchmarkMultilingualSTS (fr)": 42.32 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "xlm-roberta-large", + "SummEvalFr": 28.89 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "xlm-roberta-large" + } + ] + } + }, + "bert-base-15lang-cased": { + "BitextMining": { + "f1": [ + { + "Model": "bert-base-15lang-cased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bert-base-15lang-cased", + "AmazonReviewsClassification (fr)": 29.35, + "MTOPDomainClassification (fr)": 63.7, + "MTOPIntentClassification (fr)": 37.85, + "MasakhaNEWSClassification (fra)": 63.89, + "MassiveIntentClassification (fr)": 37.28, + "MassiveScenarioClassification (fr)": 44.47 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bert-base-15lang-cased", + "AlloProfClusteringP2P": 53.16, + "AlloProfClusteringS2S": 43.43, + "HALClusteringS2S": 20.26, + "MLSUMClusteringP2P": 41.22, + "MLSUMClusteringS2S": 31.88, + "MasakhaNEWSClusteringP2P (fra)": 24.23, + "MasakhaNEWSClusteringS2S (fra)": 24.46 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bert-base-15lang-cased", + "OpusparcusPC (fr)": 86.78, + "PawsXPairClassification (fr)": 53.38 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bert-base-15lang-cased", + "AlloprofReranking": 36.21, + "SyntecReranking": 53.25 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bert-base-15lang-cased", + "AlloprofRetrieval": 1.61, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 3.55, + "SyntecRetrieval": 18.95, + "XPQARetrieval (fr)": 18.35 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bert-base-15lang-cased", + "SICKFr": 58.77, + "STS22 (fr)": 40.4, + "STSBenchmarkMultilingualSTS (fr)": 52.25 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bert-base-15lang-cased", + "SummEvalFr": 29.13 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bert-base-15lang-cased" + } + ] + } + }, + "contriever-base-msmarco": { + "BitextMining": { + "f1": [ + { + "Model": "contriever-base-msmarco" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "contriever-base-msmarco", + "AmazonCounterfactualClassification (en)": 72.19, + "AmazonPolarityClassification": 68.63, + "AmazonReviewsClassification (en)": 37.42, + "Banking77Classification": 80.02, + "EmotionClassification": 44.77, + "ImdbClassification": 67.04, + "MTOPDomainClassification (en)": 93.18, + "MTOPIntentClassification (en)": 69.31, + "MassiveIntentClassification (en)": 67.78, + "MassiveScenarioClassification (en)": 76.0, + "ToxicConversationsClassification": 67.77, + "TweetSentimentExtractionClassification": 56.1 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "contriever-base-msmarco", + "ArxivClusteringP2P": 42.61, + "ArxivClusteringS2S": 32.32, + "BiorxivClusteringP2P": 34.97, + 
"BiorxivClusteringS2S": 29.08, + "MedrxivClusteringP2P": 31.19, + "MedrxivClusteringS2S": 27.27, + "RedditClustering": 54.89, + "RedditClusteringP2P": 57.58, + "StackExchangeClustering": 63.15, + "StackExchangeClusteringP2P": 32.25, + "TwentyNewsgroupsClustering": 46.82 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "contriever-base-msmarco", + "SprintDuplicateQuestions": 95.55, + "TwitterSemEval2015": 66.85, + "TwitterURLCorpus": 85.21 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "contriever-base-msmarco", + "AskUbuntuDupQuestions": 56.69, + "MindSmallReranking": 31.58, + "SciDocsRR": 76.51, + "StackOverflowDupQuestions": 47.78 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "contriever-base-msmarco", + "ArguAna": 48.32, + "CQADupstackRetrieval": 33.67, + "ClimateFEVER": 24.79, + "DBPedia": 38.1, + "FEVER": 59.29, + "FiQA2018": 27.42, + "HotpotQA": 56.81, + "MSMARCO": 36.77, + "NFCorpus": 31.32, + "NQ": 41.83, + "QuoraRetrieval": 86.72, + "SCIDOCS": 17.12, + "SciFact": 65.51, + "TRECCOVID": 44.77, + "Touche2020": 15.79 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "contriever-base-msmarco", + "BIOSSES": 83.32, + "SICK-R": 70.2, + "STS12": 64.34, + "STS13": 80.03, + "STS14": 74.51, + "STS15": 83.3, + "STS16": 79.67, + "STS17 (en-en)": 86.32, + "STS22 (en)": 64.64, + "STSBenchmark": 78.81 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "contriever-base-msmarco", + "SummEval": 30.36 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "contriever-base-msmarco", + "Core17InstructionRetrieval": -2.48, + "News21InstructionRetrieval": -2.83, + "Robust04InstructionRetrieval": -6.12 + } + ] + } + }, + "use-cmlm-multilingual": { + "BitextMining": { + "f1": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "use-cmlm-multilingual", + "BlurbsClusteringP2P": 29.63, + "BlurbsClusteringS2S": 15.24, + "TenKGnadClusteringP2P": 37.1, + "TenKGnadClusteringS2S": 25.64 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "use-cmlm-multilingual" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "use-cmlm-multilingual" + } + ] + } + }, + "mistral-embed": { + "BitextMining": { + "f1": [ + { + "Model": "mistral-embed" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "mistral-embed", + "AmazonReviewsClassification (fr)": 41.59, + "MTOPDomainClassification (fr)": 90.05, + "MTOPIntentClassification (fr)": 66.09, + "MasakhaNEWSClassification (fra)": 81.4, + "MassiveIntentClassification (fr)": 62.83, + "MassiveScenarioClassification (fr)": 69.71 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "mistral-embed", + "AlloProfClusteringP2P": 62.01, + "AlloProfClusteringS2S": 49.2, + "HALClusteringS2S": 26.17, + "MLSUMClusteringP2P": 45.28, + "MLSUMClusteringS2S": 42.74, + "MasakhaNEWSClusteringP2P (fra)": 48.13, + "MasakhaNEWSClusteringS2S (fra)": 39.62 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "mistral-embed", + "OpusparcusPC (fr)": 92.61, + "PawsXPairClassification (fr)": 62.02 + } + ] + 
}, + "Reranking": { + "map": [ + { + "Model": "mistral-embed", + "AlloprofReranking": 72.36, + "SyntecReranking": 88.57 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "mistral-embed", + "AILACasedocs": 38.2, + "AILAStatutes": 44.81, + "AlloprofRetrieval": 56.84, + "BSARDRetrieval": 2.48, + "GerDaLIRSmall": 17.85, + "LeCaRDv2": 61.12, + "LegalBenchConsumerContractsQA": 80.8, + "LegalBenchCorporateLobbying": 94.11, + "LegalQuAD": 47.17, + "LegalSummarization": 67.39, + "MintakaRetrieval (fr)": 21.73, + "SyntecRetrieval": 78.77, + "XPQARetrieval (fr)": 74.24 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "mistral-embed", + "SICKFr": 76.21, + "STS22 (fr)": 82.74, + "STSBenchmarkMultilingualSTS (fr)": 79.72 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "mistral-embed", + "SummEvalFr": 31.47 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "mistral-embed" + } + ] + } + }, + "universal-sentence-encoder-multilingual-large-3": { + "BitextMining": { + "f1": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "AmazonReviewsClassification (fr)": 35.09, + "MTOPDomainClassification (fr)": 88.19, + "MTOPIntentClassification (fr)": 63.64, + "MasakhaNEWSClassification (fra)": 72.04, + "MassiveIntentClassification (fr)": 65.8, + "MassiveScenarioClassification (fr)": 73.47 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "AlloProfClusteringP2P": 54.21, + "AlloProfClusteringS2S": 37.95, + "HALClusteringS2S": 18.94, + "MLSUMClusteringP2P": 41.02, + "MLSUMClusteringS2S": 37.97, + "MasakhaNEWSClusteringP2P (fra)": 24.09, + "MasakhaNEWSClusteringS2S (fra)": 40.24 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "OpusparcusPC (fr)": 93.38, + "PawsXPairClassification (fr)": 53.62 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "AlloprofReranking": 55.39, + "SyntecReranking": 77.13 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "AlloprofRetrieval": 33.78, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 26.21, + "SyntecRetrieval": 63.69, + "XPQARetrieval (fr)": 65.21 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "SICKFr": 74.39, + "STS22 (fr)": 71.11, + "STSBenchmarkMultilingualSTS (fr)": 78.16 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3", + "SummEvalFr": 28.56 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "universal-sentence-encoder-multilingual-large-3" + } + ] + } + }, + "Cohere-embed-multilingual-light-v3.0": { + "BitextMining": { + "f1": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "AmazonReviewsClassification (fr)": 38.6, + "MTOPDomainClassification (fr)": 80.79, + "MTOPIntentClassification (fr)": 50.01, + "MasakhaNEWSClassification (fra)": 82.58, + "MassiveIntentClassification (fr)": 56.31, + "MassiveScenarioClassification (fr)": 59.5 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "AlloProfClusteringP2P": 61.96, + 
"AlloProfClusteringS2S": 31.36, + "HALClusteringS2S": 17.31, + "MLSUMClusteringP2P": 42.8, + "MLSUMClusteringS2S": 32.72, + "MasakhaNEWSClusteringP2P (fra)": 56.81, + "MasakhaNEWSClusteringS2S (fra)": 29.41 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "OpusparcusPC (fr)": 90.92, + "PawsXPairClassification (fr)": 57.32 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "AlloprofReranking": 51.6, + "SyntecReranking": 88.03 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "AlloprofRetrieval": 35.39, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 23.0, + "SyntecRetrieval": 76.88, + "XPQARetrieval (fr)": 45.23 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "SICKFr": 75.5, + "STS22 (fr)": 82.8, + "STSBenchmarkMultilingualSTS (fr)": 76.48 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0", + "SummEvalFr": 31.4 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "Cohere-embed-multilingual-light-v3.0" + } + ] + } + }, + "bert-base-multilingual-cased": { + "BitextMining": { + "f1": [ + { + "Model": "bert-base-multilingual-cased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bert-base-multilingual-cased", + "AmazonReviewsClassification (fr)": 29.39, + "MTOPDomainClassification (fr)": 63.61, + "MTOPIntentClassification (fr)": 37.84, + "MasakhaNEWSClassification (fra)": 64.0, + "MassiveIntentClassification (fr)": 37.3, + "MassiveScenarioClassification (fr)": 44.47 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bert-base-multilingual-cased", + "AlloProfClusteringP2P": 51.5, + "AlloProfClusteringS2S": 43.06, + "HALClusteringS2S": 20.81, + "MLSUMClusteringP2P": 40.9, + "MLSUMClusteringS2S": 31.8, + "MasakhaNEWSClusteringP2P (fra)": 24.23, + "MasakhaNEWSClusteringS2S (fra)": 24.46 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bert-base-multilingual-cased", + "OpusparcusPC (fr)": 86.77, + "PawsXPairClassification (fr)": 53.39 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bert-base-multilingual-cased", + "AlloprofReranking": 36.23, + "SyntecReranking": 53.25 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bert-base-multilingual-cased", + "AlloprofRetrieval": 1.63, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 3.55, + "SyntecRetrieval": 18.95, + "XPQARetrieval (fr)": 18.49 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bert-base-multilingual-cased", + "SICKFr": 58.75, + "STS22 (fr)": 39.05, + "STSBenchmarkMultilingualSTS (fr)": 52.25 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bert-base-multilingual-cased", + "SummEvalFr": 28.81 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bert-base-multilingual-cased" + } + ] + } + }, + "gte-Qwen2-7B-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "gte-Qwen2-7B-instruct", + "BrightRetrieval (earth_science)": 40.66, 
+ "BrightRetrieval (sustainable_living)": 20.82, + "BrightRetrieval (theoremqa_theorems)": 28.15, + "BrightRetrieval (aops)": 15.1, + "BrightRetrieval (economics)": 16.18, + "BrightRetrieval (pony)": 1.25, + "BrightRetrieval (stackoverflow)": 13.95, + "BrightRetrieval (leetcode)": 31.07, + "BrightRetrieval (biology)": 32.09, + "BrightRetrieval (theoremqa_questions)": 29.9, + "BrightRetrieval (robotics)": 12.82, + "BrightRetrieval (psychology)": 26.58 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gte-Qwen2-7B-instruct" + } + ] + } + }, + "e5-base-v2": { + "BitextMining": { + "f1": [ + { + "Model": "e5-base-v2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "e5-base-v2" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "e5-base-v2", + "BiorxivClusteringP2P": 37.12, + "BiorxivClusteringS2S": 33.41, + "MedrxivClusteringP2P": 31.82, + "MedrxivClusteringS2S": 29.68, + "RedditClustering": 56.54, + "RedditClusteringP2P": 63.23, + "StackExchangeClustering": 64.6, + "StackExchangeClusteringP2P": 33.02, + "TwentyNewsgroupsClustering": 49.86 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "e5-base-v2" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "e5-base-v2" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "e5-base-v2" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "e5-base-v2" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "e5-base-v2" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "e5-base-v2", + "Core17InstructionRetrieval": -2.9, + "News21InstructionRetrieval": -2.0, + "Robust04InstructionRetrieval": -6.73 + } + ] + } + }, + "elser-v2": { + "BitextMining": { + "f1": [ + { + "Model": "elser-v2" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "elser-v2", + "AmazonCounterfactualClassification (en)": 74.16, + "AmazonPolarityClassification": 61.91, + "AmazonReviewsClassification (en)": 32.06, + "Banking77Classification": 82.05, + "EmotionClassification": 46.65, + "ImdbClassification": 65.02, + "MTOPDomainClassification (en)": 93.17, + "MTOPIntentClassification (en)": 71.1, + "MassiveIntentClassification (en)": 68.48, + "MassiveScenarioClassification (en)": 74.98, + "ToxicConversationsClassification": 68.15, + "TweetSentimentExtractionClassification": 53.57 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "elser-v2", + "ArxivClusteringP2P": 35.27, + "ArxivClusteringS2S": 23.18, + "BiorxivClusteringP2P": 31.13, + "BiorxivClusteringS2S": 26.78, + "MedrxivClusteringP2P": 24.65, + "MedrxivClusteringS2S": 24.21, + "RedditClustering": 38.74, + "RedditClusteringP2P": 51.92, + "StackExchangeClustering": 42.7, + "StackExchangeClusteringP2P": 28.7, + "TwentyNewsgroupsClustering": 27.82 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "elser-v2", + "SprintDuplicateQuestions": 94.53, + "TwitterSemEval2015": 64.41, + "TwitterURLCorpus": 85.01 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "elser-v2", + "AskUbuntuDupQuestions": 58.31, + "MindSmallReranking": 30.75, + "SciDocsRR": 75.62, + "StackOverflowDupQuestions": 48.4 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "elser-v2", + "ArguAna": 55.98, + "CQADupstackRetrieval": 34.27, + "ClimateFEVER": 27.08, + "DBPedia": 42.7, + "FEVER": 78.55, + "FiQA2018": 41.57, + "HotpotQA": 67.01, + 
"MSMARCO": 38.9, + "NFCorpus": 36.66, + "NQ": 55.84, + "QuoraRetrieval": 84.69, + "SCIDOCS": 16.24, + "SciFact": 71.8, + "TRECCOVID": 72.72, + "Touche2020": 26.27 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "elser-v2", + "BIOSSES": 83.79, + "SICK-R": 68.78, + "STS12": 64.81, + "STS13": 80.1, + "STS14": 74.96, + "STS15": 83.7, + "STS16": 80.55, + "STS17 (en-en)": 85.74, + "STS22 (en)": 67.5, + "STSBenchmark": 79.54 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "elser-v2", + "SummEval": 31.03 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "elser-v2" + } + ] + } + }, + "gtr-t5-base": { + "BitextMining": { + "f1": [ + { + "Model": "gtr-t5-base" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "gtr-t5-base", + "AmazonCounterfactualClassification (en)": 69.33, + "AmazonPolarityClassification": 67.82, + "AmazonReviewsClassification (en)": 38.48, + "Banking77Classification": 79.26, + "EmotionClassification": 42.2, + "ImdbClassification": 65.99, + "MTOPDomainClassification (en)": 92.42, + "MTOPIntentClassification (en)": 62.44, + "MassiveIntentClassification (en)": 67.05, + "MassiveScenarioClassification (en)": 75.4, + "ToxicConversationsClassification": 66.6, + "TweetSentimentExtractionClassification": 56.02 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "gtr-t5-base", + "ArxivClusteringP2P": 35.49, + "ArxivClusteringS2S": 27.18, + "BiorxivClusteringP2P": 27.66, + "BiorxivClusteringS2S": 23.25, + "MedrxivClusteringP2P": 27.57, + "MedrxivClusteringS2S": 25.13, + "RedditClustering": 56.13, + "RedditClusteringP2P": 58.53, + "StackExchangeClustering": 64.21, + "StackExchangeClusteringP2P": 33.01, + "TwentyNewsgroupsClustering": 46.72 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "gtr-t5-base", + "SprintDuplicateQuestions": 94.55, + "TwitterSemEval2015": 72.23, + "TwitterURLCorpus": 84.77 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "gtr-t5-base", + "AskUbuntuDupQuestions": 60.86, + "MindSmallReranking": 31.33, + "SciDocsRR": 73.71, + "StackOverflowDupQuestions": 51.01 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "gtr-t5-base", + "ArguAna": 50.83, + "CQADupstackRetrieval": 34.55, + "ClimateFEVER": 24.88, + "DBPedia": 35.24, + "FEVER": 68.93, + "FiQA2018": 35.15, + "HotpotQA": 54.93, + "MSMARCO": 41.16, + "NFCorpus": 30.22, + "NQ": 50.47, + "QuoraRetrieval": 87.98, + "SCIDOCS": 14.0, + "SciFact": 59.74, + "TRECCOVID": 56.05, + "Touche2020": 25.89 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "gtr-t5-base", + "BIOSSES": 79.0, + "SICK-R": 71.45, + "STS12": 68.59, + "STS13": 79.09, + "STS14": 74.64, + "STS15": 84.85, + "STS16": 81.57, + "STS17 (en-en)": 85.8, + "STS22 (en)": 66.17, + "STSBenchmark": 79.58 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "gtr-t5-base", + "SummEval": 29.67 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "gtr-t5-base" + } + ] + } + }, + "silver-retriever-base-v1": { + "BitextMining": { + "f1": [ + { + "Model": "silver-retriever-base-v1" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "silver-retriever-base-v1", + "AllegroReviews": 33.35, + "CBD": 68.51, + "MassiveIntentClassification (pl)": 66.63, + "MassiveScenarioClassification (pl)": 69.97, + "PAC": 66.26, + "PolEmo2.0-IN": 63.52, + "PolEmo2.0-OUT": 44.7 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "silver-retriever-base-v1", + "8TagsClustering": 31.49 + } + ] + }, + "PairClassification": { + "ap": [ + { + 
"Model": "silver-retriever-base-v1", + "CDSC-E": 67.35, + "PPC": 85.33, + "PSC": 98.46, + "SICK-E-PL": 58.19 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "silver-retriever-base-v1" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "silver-retriever-base-v1", + "ArguAna-PL": 44.12, + "DBPedia-PL": 26.32, + "FiQA-PL": 24.95, + "HotpotQA-PL": 45.13, + "MSMARCO-PL": 25.47, + "NFCorpus-PL": 28.55, + "NQ-PL": 37.9, + "Quora-PL": 77.98, + "SCIDOCS-PL": 10.9, + "SciFact-PL": 54.44, + "TRECCOVID-PL": 46.98 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "silver-retriever-base-v1", + "CDSC-R": 89.09, + "SICK-R-PL": 67.26, + "STS22 (pl)": 38.69 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "silver-retriever-base-v1" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "silver-retriever-base-v1" + } + ] + } + }, + "bm25": { + "BitextMining": { + "f1": [ + { + "Model": "bm25" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bm25" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bm25" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bm25" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bm25" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bm25", + "BrightRetrieval (robotics)": 13.53, + "BrightRetrieval (pony)": 7.93, + "BrightRetrieval (leetcode)": 24.37, + "BrightRetrieval (earth_science)": 27.06, + "BrightRetrieval (stackoverflow)": 16.55, + "BrightRetrieval (economics)": 14.87, + "BrightRetrieval (theoremqa_questions)": 9.78, + "BrightRetrieval (theoremqa_theorems)": 4.25, + "BrightRetrieval (psychology)": 12.51, + "BrightRetrieval (sustainable_living)": 15.22, + "BrightRetrieval (biology)": 19.19, + "BrightRetrieval (aops)": 6.2 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bm25" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bm25" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bm25", + "Core17InstructionRetrieval": -1.06, + "News21InstructionRetrieval": -2.15, + "Robust04InstructionRetrieval": -3.06 + } + ] + } + }, + "bge-large-zh-noinstruct": { + "BitextMining": { + "f1": [ + { + "Model": "bge-large-zh-noinstruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-large-zh-noinstruct", + "AmazonReviewsClassification (zh)": 41.94, + "IFlyTek": 45.32, + "JDReview": 85.38, + "MassiveIntentClassification (zh-CN)": 66.96, + "MassiveScenarioClassification (zh-CN)": 73.39, + "MultilingualSentiment": 73.7, + "OnlineShopping": 91.66, + "TNews": 52.05, + "Waimai": 86.83 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bge-large-zh-noinstruct", + "CLSClusteringP2P": 41.23, + "CLSClusteringS2S": 40.04, + "ThuNewsClusteringP2P": 62.03, + "ThuNewsClusteringS2S": 56.75 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bge-large-zh-noinstruct", + "Cmnli": 82.17, + "Ocnli": 71.37 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bge-large-zh-noinstruct", + "CMedQAv1": 81.72, + "CMedQAv2": 84.64, + "MMarcoReranking": 27.1, + "T2Reranking": 66.16 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bge-large-zh-noinstruct", + "CmedqaRetrieval": 41.03, + "CovidRetrieval": 75.07, + "DuRetrieval": 84.68, + "EcomRetrieval": 65.6, + "MMarcoRetrieval": 81.38, + "MedicalRetrieval": 58.28, + "T2Retrieval": 84.39, + "VideoRetrieval": 73.93 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bge-large-zh-noinstruct", + "AFQMC": 43.06, + "ATEC": 48.29, + "BQ": 60.53, 
+ "LCQMC": 74.71, + "PAWSX": 16.64, + "QBQTC": 35.2, + "STS22 (zh)": 67.19, + "STSB": 78.41 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bge-large-zh-noinstruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bge-large-zh-noinstruct" + } + ] + } + }, + "bert-base-multilingual-uncased": { + "BitextMining": { + "f1": [ + { + "Model": "bert-base-multilingual-uncased" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bert-base-multilingual-uncased", + "AmazonReviewsClassification (fr)": 29.02, + "MTOPDomainClassification (fr)": 64.49, + "MTOPIntentClassification (fr)": 39.4, + "MasakhaNEWSClassification (fra)": 75.69, + "MassiveIntentClassification (fr)": 38.01, + "MassiveScenarioClassification (fr)": 43.63 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bert-base-multilingual-uncased", + "AlloProfClusteringP2P": 60.66, + "AlloProfClusteringS2S": 35.05, + "HALClusteringS2S": 20.9, + "MLSUMClusteringP2P": 43.5, + "MLSUMClusteringS2S": 30.99, + "MasakhaNEWSClusteringP2P (fra)": 49.71, + "MasakhaNEWSClusteringS2S (fra)": 42.23 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bert-base-multilingual-uncased", + "OpusparcusPC (fr)": 87.43, + "PawsXPairClassification (fr)": 53.22 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bert-base-multilingual-uncased", + "AlloprofReranking": 38.85, + "SyntecReranking": 66.4 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bert-base-multilingual-uncased", + "AlloprofRetrieval": 5.51, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 2.87, + "SyntecRetrieval": 34.95, + "XPQARetrieval (fr)": 26.12 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bert-base-multilingual-uncased", + "SICKFr": 58.26, + "STS22 (fr)": 56.47, + "STSBenchmarkMultilingualSTS (fr)": 54.97 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bert-base-multilingual-uncased", + "SummEvalFr": 30.72 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bert-base-multilingual-uncased" + } + ] + } + }, + "bge-large-en-v1.5-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bge-large-en-v1.5-instruct", + "ARCChallenge": 8.86, + "AlphaNLI": 0.86, + "HellaSwag": 26.24, + "PIQA": 23.26, + "Quail": 2.72, + "RARbCode": 45.25, + "RARbMath": 49.82, + "SIQA": 0.59, + "SpartQA": 2.34, + "TempReasonL1": 1.17, + "TempReasonL2Fact": 21.19, + "TempReasonL2Pure": 2.1, + "TempReasonL3Fact": 17.59, + "TempReasonL3Pure": 5.99, + "WinoGrande": 10.31 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bge-large-en-v1.5-instruct" + } + ] + } + }, + "Cohere-embed-english-v3.0-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ 
+ { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "Cohere-embed-english-v3.0-instruct", + "ARCChallenge": 10.1, + "AlphaNLI": 18.75, + "HellaSwag": 29.02, + "PIQA": 27.89, + "Quail": 7.77, + "RARbCode": 56.56, + "RARbMath": 72.05, + "SIQA": 5.03, + "SpartQA": 3.33, + "TempReasonL1": 1.43, + "TempReasonL2Fact": 40.46, + "TempReasonL2Pure": 2.39, + "TempReasonL3Fact": 33.87, + "TempReasonL3Pure": 7.52, + "WinoGrande": 65.02 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "Cohere-embed-english-v3.0-instruct" + } + ] + } + }, + "multi-qa-MiniLM-L6-cos-v1": { + "BitextMining": { + "f1": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "AmazonReviewsClassification (fr)": 27.05, + "MTOPDomainClassification (fr)": 72.97, + "MTOPIntentClassification (fr)": 37.18, + "MasakhaNEWSClassification (fra)": 75.62, + "MassiveIntentClassification (fr)": 42.64, + "MassiveScenarioClassification (fr)": 49.92 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "AlloProfClusteringP2P": 49.13, + "AlloProfClusteringS2S": 26.16, + "HALClusteringS2S": 12.49, + "MLSUMClusteringP2P": 35.15, + "MLSUMClusteringS2S": 25.95, + "MasakhaNEWSClusteringP2P (fra)": 53.73, + "MasakhaNEWSClusteringS2S (fra)": 27.27 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "OpusparcusPC (fr)": 88.07, + "PawsXPairClassification (fr)": 57.36 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "AlloprofReranking": 40.28, + "SyntecReranking": 65.08 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "AlloprofRetrieval": 30.23, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 16.31, + "SyntecRetrieval": 58.07, + "XPQARetrieval (fr)": 48.83 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "SICKFr": 62.11, + "STS22 (fr)": 74.62, + "STSBenchmarkMultilingualSTS (fr)": 63.85 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1", + "SummEvalFr": 27.59 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "multi-qa-MiniLM-L6-cos-v1" + } + ] + } + }, + "contriever": { + "BitextMining": { + "f1": [ + { + "Model": "contriever" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "contriever" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "contriever" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "contriever" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "contriever" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "contriever", + "ARCChallenge": 8.62, + "AlphaNLI": 31.77, + "HellaSwag": 17.73, + "PIQA": 24.64, + "Quail": 4.97, + "RARbCode": 9.28, + "RARbMath": 30.76, + "SIQA": 1.27, + "SpartQA": 10.94, + "TempReasonL1": 1.93, + "TempReasonL2Fact": 22.68, + "TempReasonL2Pure": 1.12, + "TempReasonL3Fact": 20.62, + "TempReasonL3Pure": 7.8, + "WinoGrande": 47.15 + } + ] + }, + 
"STS": { + "spearman": [ + { + "Model": "contriever" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "contriever" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "contriever" + } + ] + } + }, + "e5-large": { + "BitextMining": { + "f1": [ + { + "Model": "e5-large", + "BornholmBitextMining": 40.15 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "e5-large", + "AngryTweetsClassification": 46.14, + "DKHateClassification": 58.72, + "DanishPoliticalCommentsClassification": 28.67, + "LccSentimentClassification": 42.13, + "MassiveIntentClassification (da)": 42.29, + "MassiveIntentClassification (nb)": 40.63, + "MassiveIntentClassification (sv)": 40.69, + "MassiveScenarioClassification (da)": 52.95, + "MassiveScenarioClassification (nb)": 51.91, + "MassiveScenarioClassification (sv)": 50.97, + "NoRecClassification": 41.83, + "NordicLangClassification": 58.3, + "NorwegianParliament": 57.26, + "ScalaDaClassification": 49.9, + "ScalaNbClassification": 50.13 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "e5-large" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "e5-large" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "e5-large" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "e5-large" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "e5-large" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "e5-large" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "e5-large" + } + ] + } + }, + "LaBSE-en-ru": { + "BitextMining": { + "f1": [ + { + "Model": "LaBSE-en-ru", + "Tatoeba (rus-Cyrl_eng-Latn)": 93.62 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "LaBSE-en-ru", + "GeoreviewClassification (rus-Cyrl)": 40.89, + "HeadlineClassification (rus-Cyrl)": 68.75, + "InappropriatenessClassification (rus-Cyrl)": 58.48, + "KinopoiskClassification (rus-Cyrl)": 49.85, + "MassiveIntentClassification (swa-Latn)": 19.98, + "MassiveIntentClassification (aze-Latn)": 19.52, + "MassiveIntentClassification (tur-Latn)": 24.12, + "MassiveIntentClassification (cmo-Hans)": 3.96, + "MassiveIntentClassification (amh-Ethi)": 2.76, + "MassiveIntentClassification (kan-Knda)": 2.86, + "MassiveIntentClassification (hin-Deva)": 3.29, + "MassiveIntentClassification (tgl-Latn)": 27.08, + "MassiveIntentClassification (tha-Thai)": 4.0, + "MassiveIntentClassification (swe-Latn)": 32.01, + "MassiveIntentClassification (deu-Latn)": 35.14, + "MassiveIntentClassification (spa-Latn)": 37.67, + "MassiveIntentClassification (por-Latn)": 39.84, + "MassiveIntentClassification (jpn-Jpan)": 4.78, + "MassiveIntentClassification (fin-Latn)": 31.11, + "MassiveIntentClassification (kat-Geor)": 2.87, + "MassiveIntentClassification (slv-Latn)": 35.66, + "MassiveIntentClassification (rus-Cyrl)": 60.53, + "MassiveIntentClassification (ita-Latn)": 43.32, + "MassiveIntentClassification (tel-Telu)": 2.72, + "MassiveIntentClassification (afr-Latn)": 30.59, + "MassiveIntentClassification (isl-Latn)": 25.61, + "MassiveIntentClassification (fas-Arab)": 3.71, + "MassiveIntentClassification (vie-Latn)": 23.0, + "MassiveIntentClassification (ben-Beng)": 3.35, + "MassiveIntentClassification (hye-Armn)": 2.8, + "MassiveIntentClassification (pol-Latn)": 31.3, + "MassiveIntentClassification (cym-Latn)": 26.59, + "MassiveIntentClassification (jav-Latn)": 26.84, + "MassiveIntentClassification (mon-Cyrl)": 35.97, + "MassiveIntentClassification (en)": 60.48, + "MassiveIntentClassification (msa-Latn)": 27.82, + 
"MassiveIntentClassification (nob-Latn)": 35.78, + "MassiveIntentClassification (heb-Hebr)": 2.33, + "MassiveIntentClassification (khm-Khmr)": 4.6, + "MassiveIntentClassification (nld-Latn)": 34.66, + "MassiveIntentClassification (ind-Latn)": 33.31, + "MassiveIntentClassification (mal-Mlym)": 2.63, + "MassiveIntentClassification (tam-Taml)": 2.22, + "MassiveIntentClassification (mya-Mymr)": 3.57, + "MassiveIntentClassification (urd-Arab)": 3.36, + "MassiveIntentClassification (dan-Latn)": 38.66, + "MassiveIntentClassification (cmo-Hant)": 5.29, + "MassiveIntentClassification (ron-Latn)": 37.45, + "MassiveIntentClassification (lav-Latn)": 23.92, + "MassiveIntentClassification (fra-Latn)": 40.29, + "MassiveIntentClassification (ell-Grek)": 11.14, + "MassiveIntentClassification (sqi-Latn)": 35.84, + "MassiveIntentClassification (hun-Latn)": 26.74, + "MassiveIntentClassification (kor-Kore)": 2.69, + "MassiveIntentClassification (ara-Arab)": 5.19, + "MassiveScenarioClassification (swa-Latn)": 25.61, + "MassiveScenarioClassification (aze-Latn)": 24.48, + "MassiveScenarioClassification (tur-Latn)": 31.38, + "MassiveScenarioClassification (cmo-Hans)": 9.98, + "MassiveScenarioClassification (amh-Ethi)": 7.59, + "MassiveScenarioClassification (kan-Knda)": 8.73, + "MassiveScenarioClassification (hin-Deva)": 8.77, + "MassiveScenarioClassification (tgl-Latn)": 35.12, + "MassiveScenarioClassification (tha-Thai)": 8.69, + "MassiveScenarioClassification (swe-Latn)": 35.83, + "MassiveScenarioClassification (deu-Latn)": 41.72, + "MassiveScenarioClassification (spa-Latn)": 43.33, + "MassiveScenarioClassification (por-Latn)": 44.62, + "MassiveScenarioClassification (jpn-Jpan)": 9.51, + "MassiveScenarioClassification (fin-Latn)": 33.79, + "MassiveScenarioClassification (kat-Geor)": 7.32, + "MassiveScenarioClassification (slv-Latn)": 37.6, + "MassiveScenarioClassification (rus-Cyrl)": 65.15, + "MassiveScenarioClassification (ita-Latn)": 47.28, + "MassiveScenarioClassification (tel-Telu)": 7.53, + "MassiveScenarioClassification (afr-Latn)": 37.27, + "MassiveScenarioClassification (isl-Latn)": 30.32, + "MassiveScenarioClassification (fas-Arab)": 6.83, + "MassiveScenarioClassification (vie-Latn)": 28.92, + "MassiveScenarioClassification (ben-Beng)": 8.57, + "MassiveScenarioClassification (hye-Armn)": 8.91, + "MassiveScenarioClassification (pol-Latn)": 33.75, + "MassiveScenarioClassification (cym-Latn)": 30.38, + "MassiveScenarioClassification (jav-Latn)": 33.94, + "MassiveScenarioClassification (mon-Cyrl)": 41.53, + "MassiveScenarioClassification (en)": 65.43, + "MassiveScenarioClassification (msa-Latn)": 36.28, + "MassiveScenarioClassification (nob-Latn)": 42.43, + "MassiveScenarioClassification (heb-Hebr)": 8.64, + "MassiveScenarioClassification (khm-Khmr)": 9.99, + "MassiveScenarioClassification (nld-Latn)": 41.47, + "MassiveScenarioClassification (ind-Latn)": 39.05, + "MassiveScenarioClassification (mal-Mlym)": 7.24, + "MassiveScenarioClassification (tam-Taml)": 7.71, + "MassiveScenarioClassification (mya-Mymr)": 9.94, + "MassiveScenarioClassification (urd-Arab)": 9.16, + "MassiveScenarioClassification (dan-Latn)": 44.69, + "MassiveScenarioClassification (cmo-Hant)": 10.48, + "MassiveScenarioClassification (ron-Latn)": 44.55, + "MassiveScenarioClassification (lav-Latn)": 26.26, + "MassiveScenarioClassification (fra-Latn)": 45.08, + "MassiveScenarioClassification (ell-Grek)": 19.46, + "MassiveScenarioClassification (sqi-Latn)": 40.9, + "MassiveScenarioClassification (hun-Latn)": 33.92, + 
"MassiveScenarioClassification (kor-Kore)": 7.37, + "MassiveScenarioClassification (ara-Arab)": 12.43, + "RuReviewsClassification (rus-Cyrl)": 58.01, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.8, + "RuSciBenchOECDClassification (rus-Cyrl)": 40.36 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LaBSE-en-ru", + "GeoreviewClusteringP2P (rus-Cyrl)": 51.89, + "MLSUMClusteringP2P (rus-Cyrl)": 37.87, + "MLSUMClusteringS2S (rus-Cyrl)": 41.24, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.48, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.16 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "LaBSE-en-ru", + "OpusparcusPC (rus-Cyrl)": 87.18, + "TERRa (rus-Cyrl)": 55.61 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "LaBSE-en-ru", + "RuBQReranking (rus-Cyrl)": 54.83 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "LaBSE-en-ru", + "RiaNewsRetrieval (rus-Cyrl)": 34.73, + "RuBQRetrieval (rus-Cyrl)": 29.03 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "LaBSE-en-ru", + "RUParaPhraserSTS (rus-Cyrl)": 65.87, + "RuSTSBenchmarkSTS (rus-Cyrl)": 73.32, + "STS22 (deu-Latn)": 38.9, + "STS22 (en)": 59.47, + "STS22 (pol-Latn_eng-Latn)": 58.73, + "STS22 (spa-Latn)": 60.85, + "STS22 (fra-Latn)": 74.98, + "STS22 (deu-Latn_eng-Latn)": 47.98, + "STS22 (deu-Latn_fra-Latn)": 59.4, + "STS22 (deu-Latn_pol-Latn)": 39.48, + "STS22 (pol-Latn)": 32.74, + "STS22 (tur-Latn)": 55.04, + "STS22 (spa-Latn_eng-Latn)": 70.8, + "STS22 (rus-Cyrl)": 58.53, + "STS22 (ita-Latn)": 68.58, + "STS22 (fra-Latn_pol-Latn)": 61.98, + "STS22 (spa-Latn_ita-Latn)": 66.83, + "STS22 (cmn-Hans_eng-Latn)": 24.98, + "STS22 (ara-Arab)": 31.85, + "STS22 (cmn-Hans)": 35.1, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.02 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "LaBSE-en-ru" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "LaBSE-en-ru" + } + ] + } + }, + "text-search-babbage-001": { + "BitextMining": { + "f1": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text-search-babbage-001", + "ArguAna": 49.2, + "ClimateFEVER": 19.9, + "FEVER": 77.0, + "FiQA2018": 42.2, + "HotpotQA": 63.1, + "NFCorpus": 36.7, + "QuoraRetrieval": 69.7, + "SciFact": 70.4, + "TRECCOVID": 58.5, + "Touche2020": 29.7 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-search-babbage-001" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text-search-babbage-001" + } + ] + } + }, + "xlm-roberta-base": { + "BitextMining": { + "f1": [ + { + "Model": "xlm-roberta-base", + "BornholmBitextMining": 4.42 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "xlm-roberta-base", + "AmazonReviewsClassification (fr)": 26.75, + "AngryTweetsClassification": 52.41, + "DKHateClassification": 56.78, + "DanishPoliticalCommentsClassification": 34.03, + "LccSentimentClassification": 52.27, + "MTOPDomainClassification (fr)": 43.83, + "MTOPIntentClassification (fr)": 19.38, + "MasakhaNEWSClassification (fra)": 60.5, + "MassiveIntentClassification (da)": 41.06, + 
"MassiveIntentClassification (nb)": 40.46, + "MassiveIntentClassification (sv)": 45.12, + "MassiveIntentClassification (fr)": 13.58, + "MassiveScenarioClassification (da)": 43.91, + "MassiveScenarioClassification (nb)": 44.83, + "MassiveScenarioClassification (sv)": 47.35, + "MassiveScenarioClassification (fr)": 23.21, + "NoRecClassification": 46.28, + "NordicLangClassification": 79.39, + "NorwegianParliament": 56.75, + "ScalaDaClassification": 57.3, + "ScalaNbClassification": 58.33 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "xlm-roberta-base", + "AlloProfClusteringP2P": 52.24, + "AlloProfClusteringS2S": 20.37, + "HALClusteringS2S": 8.68, + "MLSUMClusteringP2P": 40.44, + "MLSUMClusteringS2S": 24.14, + "MasakhaNEWSClusteringP2P (fra)": 29.29, + "MasakhaNEWSClusteringS2S (fra)": 23.76 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "xlm-roberta-base", + "OpusparcusPC (fr)": 85.45, + "PawsXPairClassification (fr)": 51.35 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "xlm-roberta-base", + "AlloprofReranking": 25.58, + "SyntecReranking": 43.75 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "xlm-roberta-base", + "AlloprofRetrieval": 0.16, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 0.88, + "SyntecRetrieval": 3.33, + "XPQARetrieval (fr)": 11.65 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "xlm-roberta-base", + "SICKFr": 48.62, + "STS22 (fr)": 56.72, + "STSBenchmarkMultilingualSTS (fr)": 46.23 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "xlm-roberta-base", + "SummEvalFr": 29.14 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "xlm-roberta-base" + } + ] + } + }, + "titan-embed-text-v1": { + "BitextMining": { + "f1": [ + { + "Model": "titan-embed-text-v1" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "titan-embed-text-v1", + "AmazonCounterfactualClassification (en)": 61.85, + "Banking77Classification": 83.21 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "titan-embed-text-v1" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "titan-embed-text-v1" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "titan-embed-text-v1", + "SciDocsRR": 88.87 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "titan-embed-text-v1", + "ArguAna": 48.83, + "FiQA2018": 40.38, + "MSMARCO": 35.19, + "NQ": 51.08, + "SciFact": 73.5, + "TRECCOVID": 54.74 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "titan-embed-text-v1", + "BIOSSES": 84.17, + "SICK-R": 73.05, + "STS12": 66.59, + "STS13": 83.24, + "STS14": 73.71, + "STS15": 82.4, + "STS16": NaN, + "STS17 (en-en)": 80.9, + "STSBenchmark": 74.85 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "titan-embed-text-v1" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "titan-embed-text-v1" + } + ] + } + }, + "bge-small-en-v1.5": { + "BitextMining": { + "f1": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bge-small-en-v1.5", + "ARCChallenge": 8.95, + "AlphaNLI": 11.64, + "HellaSwag": 25.44, + "PIQA": 23.92, + "Quail": 1.75, + "RARbCode": 42.36, + "RARbMath": 44.98, + "SIQA": 0.77, 
+ "SpartQA": 3.55, + "TempReasonL1": 1.41, + "TempReasonL2Fact": 17.56, + "TempReasonL2Pure": 1.05, + "TempReasonL3Fact": 13.88, + "TempReasonL3Pure": 4.76, + "WinoGrande": 10.28 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bge-small-en-v1.5" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bge-small-en-v1.5" + } + ] + } + }, + "bert-base-swedish-cased": { + "BitextMining": { + "f1": [ + { + "Model": "bert-base-swedish-cased", + "BornholmBitextMining": 6.6 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bert-base-swedish-cased", + "AngryTweetsClassification": 44.58, + "DKHateClassification": 55.53, + "DanishPoliticalCommentsClassification": 28.97, + "LccSentimentClassification": 41.2, + "MassiveIntentClassification (da)": 37.98, + "MassiveIntentClassification (nb)": 35.75, + "MassiveIntentClassification (sv)": 52.75, + "MassiveScenarioClassification (da)": 40.44, + "MassiveScenarioClassification (nb)": 35.76, + "MassiveScenarioClassification (sv)": 56.09, + "NoRecClassification": 43.91, + "NordicLangClassification": 62.45, + "NorwegianParliament": 57.56, + "ScalaDaClassification": 53.53, + "ScalaNbClassification": 53.63 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bert-base-swedish-cased" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bert-base-swedish-cased" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bert-base-swedish-cased" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bert-base-swedish-cased" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "bert-base-swedish-cased" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "bert-base-swedish-cased" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "bert-base-swedish-cased" + } + ] + } + }, + "GritLM-7B": { + "BitextMining": { + "f1": [ + { + "Model": "GritLM-7B", + "BornholmBitextMining (dan-Latn)": 45.13, + "Tatoeba (csb-Latn_eng-Latn)": 50.13, + "Tatoeba (ceb-Latn_eng-Latn)": 33.5, + "Tatoeba (cmn-Hans_eng-Latn)": 94.08, + "Tatoeba (uzb-Latn_eng-Latn)": 41.69, + "Tatoeba (kur-Latn_eng-Latn)": 27.94, + "Tatoeba (ita-Latn_eng-Latn)": 91.2, + "Tatoeba (lvs-Latn_eng-Latn)": 53.54, + "Tatoeba (yid-Hebr_eng-Latn)": 17.13, + "Tatoeba (gle-Latn_eng-Latn)": 48.14, + "Tatoeba (ast-Latn_eng-Latn)": 79.11, + "Tatoeba (ang-Latn_eng-Latn)": 76.84, + "Tatoeba (jav-Latn_eng-Latn)": 26.6, + "Tatoeba (ina-Latn_eng-Latn)": 91.24, + "Tatoeba (nob-Latn_eng-Latn)": 93.53, + "Tatoeba (swe-Latn_eng-Latn)": 90.43, + "Tatoeba (lfn-Latn_eng-Latn)": 62.23, + "Tatoeba (fin-Latn_eng-Latn)": 85.76, + "Tatoeba (fry-Latn_eng-Latn)": 61.16, + "Tatoeba (gsw-Latn_eng-Latn)": 53.28, + "Tatoeba (rus-Cyrl_eng-Latn)": 91.82, + "Tatoeba (tat-Cyrl_eng-Latn)": 24.46, + "Tatoeba (mal-Mlym_eng-Latn)": 33.79, + "Tatoeba (hrv-Latn_eng-Latn)": 91.04, + "Tatoeba (ind-Latn_eng-Latn)": 90.05, + "Tatoeba (tam-Taml_eng-Latn)": 46.27, + "Tatoeba (kaz-Cyrl_eng-Latn)": 36.27, + "Tatoeba (uig-Arab_eng-Latn)": 22.6, + "Tatoeba (slv-Latn_eng-Latn)": 82.71, + "Tatoeba (pms-Latn_eng-Latn)": 50.41, + "Tatoeba (lit-Latn_eng-Latn)": 56.36, + "Tatoeba (cha-Latn_eng-Latn)": 34.69, + "Tatoeba (est-Latn_eng-Latn)": 46.73, + "Tatoeba (mhr-Cyrl_eng-Latn)": 10.8, + "Tatoeba (dan-Latn_eng-Latn)": 92.01, + "Tatoeba (pol-Latn_eng-Latn)": 95.6, + "Tatoeba (nov-Latn_eng-Latn)": 64.85, + "Tatoeba (swh-Latn_eng-Latn)": 46.09, + "Tatoeba (tha-Thai_eng-Latn)": 81.25, + "Tatoeba 
(arz-Arab_eng-Latn)": 52.97, + "Tatoeba (epo-Latn_eng-Latn)": 76.87, + "Tatoeba (deu-Latn_eng-Latn)": 98.02, + "Tatoeba (hye-Armn_eng-Latn)": 35.94, + "Tatoeba (afr-Latn_eng-Latn)": 79.17, + "Tatoeba (gla-Latn_eng-Latn)": 40.8, + "Tatoeba (isl-Latn_eng-Latn)": 74.94, + "Tatoeba (awa-Deva_eng-Latn)": 44.31, + "Tatoeba (ido-Latn_eng-Latn)": 65.69, + "Tatoeba (kor-Hang_eng-Latn)": 87.43, + "Tatoeba (amh-Ethi_eng-Latn)": 6.18, + "Tatoeba (eus-Latn_eng-Latn)": 31.88, + "Tatoeba (mkd-Cyrl_eng-Latn)": 73.82, + "Tatoeba (tur-Latn_eng-Latn)": 86.62, + "Tatoeba (pes-Arab_eng-Latn)": 78.98, + "Tatoeba (heb-Hebr_eng-Latn)": 61.75, + "Tatoeba (aze-Latn_eng-Latn)": 64.11, + "Tatoeba (hun-Latn_eng-Latn)": 88.54, + "Tatoeba (bul-Cyrl_eng-Latn)": 90.37, + "Tatoeba (kab-Latn_eng-Latn)": 2.9, + "Tatoeba (cat-Latn_eng-Latn)": 90.66, + "Tatoeba (dsb-Latn_eng-Latn)": 51.72, + "Tatoeba (kat-Geor_eng-Latn)": 38.42, + "Tatoeba (urd-Arab_eng-Latn)": 68.02, + "Tatoeba (wuu-Hans_eng-Latn)": 80.28, + "Tatoeba (oci-Latn_eng-Latn)": 58.12, + "Tatoeba (arq-Arab_eng-Latn)": 30.52, + "Tatoeba (ron-Latn_eng-Latn)": 90.29, + "Tatoeba (bos-Latn_eng-Latn)": 87.33, + "Tatoeba (nds-Latn_eng-Latn)": 64.54, + "Tatoeba (tgl-Latn_eng-Latn)": 83.24, + "Tatoeba (glg-Latn_eng-Latn)": 86.69, + "Tatoeba (ben-Beng_eng-Latn)": 61.32, + "Tatoeba (khm-Khmr_eng-Latn)": 16.4, + "Tatoeba (ukr-Cyrl_eng-Latn)": 90.19, + "Tatoeba (max-Deva_eng-Latn)": 51.87, + "Tatoeba (lat-Latn_eng-Latn)": 80.43, + "Tatoeba (xho-Latn_eng-Latn)": 28.43, + "Tatoeba (spa-Latn_eng-Latn)": 96.75, + "Tatoeba (tzl-Latn_eng-Latn)": 42.85, + "Tatoeba (ara-Arab_eng-Latn)": 76.77, + "Tatoeba (vie-Latn_eng-Latn)": 91.32, + "Tatoeba (ces-Latn_eng-Latn)": 92.02, + "Tatoeba (jpn-Jpan_eng-Latn)": 91.9, + "Tatoeba (bel-Cyrl_eng-Latn)": 76.21, + "Tatoeba (mon-Cyrl_eng-Latn)": 27.38, + "Tatoeba (nld-Latn_eng-Latn)": 94.96, + "Tatoeba (war-Latn_eng-Latn)": 27.75, + "Tatoeba (bre-Latn_eng-Latn)": 12.59, + "Tatoeba (por-Latn_eng-Latn)": 93.41, + "Tatoeba (ile-Latn_eng-Latn)": 76.72, + "Tatoeba (mar-Deva_eng-Latn)": 51.54, + "Tatoeba (fao-Latn_eng-Latn)": 62.03, + "Tatoeba (slk-Latn_eng-Latn)": 84.96, + "Tatoeba (tel-Telu_eng-Latn)": 24.26, + "Tatoeba (cym-Latn_eng-Latn)": 50.03, + "Tatoeba (srp-Cyrl_eng-Latn)": 88.45, + "Tatoeba (swg-Latn_eng-Latn)": 52.09, + "Tatoeba (hin-Deva_eng-Latn)": 84.19, + "Tatoeba (yue-Hant_eng-Latn)": 79.5, + "Tatoeba (fra-Latn_eng-Latn)": 92.47, + "Tatoeba (cor-Latn_eng-Latn)": 6.97, + "Tatoeba (hsb-Latn_eng-Latn)": 64.48, + "Tatoeba (zsm-Latn_eng-Latn)": 90.06, + "Tatoeba (ber-Tfng_eng-Latn)": 6.2, + "Tatoeba (pam-Latn_eng-Latn)": 12.11, + "Tatoeba (kzj-Latn_eng-Latn)": 9.61, + "Tatoeba (dtp-Latn_eng-Latn)": 8.37, + "Tatoeba (nno-Latn_eng-Latn)": 80.89, + "Tatoeba (ell-Grek_eng-Latn)": 80.13, + "Tatoeba (orv-Cyrl_eng-Latn)": 45.88, + "Tatoeba (sqi-Latn_eng-Latn)": 54.37, + "Tatoeba (tuk-Latn_eng-Latn)": 30.47, + "Tatoeba (cbk-Latn_eng-Latn)": 67.64 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "GritLM-7B", + "AllegroReviews (pol-Latn)": 37.32, + "AmazonCounterfactualClassification (en-ext)": 70.34, + "AmazonCounterfactualClassification (en)": 71.1, + "AmazonCounterfactualClassification (deu-Latn)": 67.63, + "AmazonCounterfactualClassification (jpn-Jpan)": 73.3, + "AmazonPolarityClassification": 86.69, + "AmazonReviewsClassification (en)": 45.51, + "AmazonReviewsClassification (deu-Latn)": 43.77, + "AmazonReviewsClassification (spa-Latn)": 43.0, + "AmazonReviewsClassification (fra-Latn)": 44.15, + "AmazonReviewsClassification 
(jpn-Jpan)": 41.49, + "AmazonReviewsClassification (cmn-Hans)": 35.34, + "AngryTweetsClassification (dan-Latn)": 54.68, + "Banking77Classification": 79.36, + "CBD (pol-Latn)": 70.98, + "DanishPoliticalCommentsClassification (dan-Latn)": 37.69, + "EmotionClassification": 48.79, + "GeoreviewClassification (rus-Cyrl)": 45.72, + "HeadlineClassification (rus-Cyrl)": 78.05, + "IFlyTek (cmn-Hans)": 48.49, + "ImdbClassification": 82.25, + "InappropriatenessClassification (rus-Cyrl)": 60.11, + "JDReview (cmn-Hans)": 84.02, + "KinopoiskClassification (rus-Cyrl)": 56.14, + "LccSentimentClassification (dan-Latn)": 57.2, + "MTOPDomainClassification (en)": 92.67, + "MTOPDomainClassification (deu-Latn)": 88.32, + "MTOPDomainClassification (spa-Latn)": 88.45, + "MTOPDomainClassification (fra-Latn)": 88.44, + "MTOPDomainClassification (hin-Deva)": 86.89, + "MTOPDomainClassification (tha-Thai)": 82.97, + "MTOPIntentClassification (en)": 69.77, + "MTOPIntentClassification (deu-Latn)": 69.53, + "MTOPIntentClassification (spa-Latn)": 67.49, + "MTOPIntentClassification (fra-Latn)": 65.93, + "MTOPIntentClassification (hin-Deva)": 59.47, + "MTOPIntentClassification (tha-Thai)": 65.14, + "MasakhaNEWSClassification (amh-Ethi)": 53.06, + "MasakhaNEWSClassification (eng)": 77.57, + "MasakhaNEWSClassification (fra-Latn)": 77.39, + "MasakhaNEWSClassification (hau-Latn)": 74.66, + "MasakhaNEWSClassification (ibo-Latn)": 68.64, + "MasakhaNEWSClassification (lin-Latn)": 74.23, + "MasakhaNEWSClassification (lug-Latn)": 72.33, + "MasakhaNEWSClassification (orm-Ethi)": 77.6, + "MasakhaNEWSClassification (pcm-Latn)": 91.28, + "MasakhaNEWSClassification (run-Latn)": 76.3, + "MasakhaNEWSClassification (sna-Latn)": 85.99, + "MasakhaNEWSClassification (som-Latn)": 63.71, + "MasakhaNEWSClassification (swa-Latn)": 73.4, + "MasakhaNEWSClassification (tir-Ethi)": 34.41, + "MasakhaNEWSClassification (xho-Latn)": 83.27, + "MasakhaNEWSClassification (yor-Latn)": 80.92, + "MassiveIntentClassification (mya-Mymr)": 36.92, + "MassiveIntentClassification (en)": 71.52, + "MassiveIntentClassification (slv-Latn)": 63.08, + "MassiveIntentClassification (sqi-Latn)": 50.98, + "MassiveIntentClassification (kor-Kore)": 65.71, + "MassiveIntentClassification (aze-Latn)": 56.24, + "MassiveIntentClassification (isl-Latn)": 51.96, + "MassiveIntentClassification (hin-Deva)": 61.18, + "MassiveIntentClassification (dan-Latn)": 65.39, + "MassiveIntentClassification (vie-Latn)": 62.05, + "MassiveIntentClassification (heb-Hebr)": 57.71, + "MassiveIntentClassification (tur-Latn)": 65.26, + "MassiveIntentClassification (cmo-Hans)": 67.43, + "MassiveIntentClassification (khm-Khmr)": 38.86, + "MassiveIntentClassification (deu-Latn)": 67.75, + "MassiveIntentClassification (fas-Arab)": 65.98, + "MassiveIntentClassification (jav-Latn)": 50.25, + "MassiveIntentClassification (nld-Latn)": 66.82, + "MassiveIntentClassification (jpn-Jpan)": 68.56, + "MassiveIntentClassification (ita-Latn)": 68.04, + "MassiveIntentClassification (cym-Latn)": 48.59, + "MassiveIntentClassification (pol-Latn)": 67.97, + "MassiveIntentClassification (fin-Latn)": 60.55, + "MassiveIntentClassification (tha-Thai)": 58.99, + "MassiveIntentClassification (lav-Latn)": 51.12, + "MassiveIntentClassification (mal-Mlym)": 43.57, + "MassiveIntentClassification (hun-Latn)": 63.48, + "MassiveIntentClassification (ind-Latn)": 65.58, + "MassiveIntentClassification (por-Latn)": 67.76, + "MassiveIntentClassification (tel-Telu)": 44.73, + "MassiveIntentClassification (amh-Ethi)": 34.73, + 
"MassiveIntentClassification (kan-Knda)": 44.51, + "MassiveIntentClassification (spa-Latn)": 66.45, + "MassiveIntentClassification (urd-Arab)": 54.11, + "MassiveIntentClassification (kat-Geor)": 42.01, + "MassiveIntentClassification (tam-Taml)": 43.48, + "MassiveIntentClassification (afr-Latn)": 59.48, + "MassiveIntentClassification (rus-Cyrl)": 69.41, + "MassiveIntentClassification (tgl-Latn)": 61.83, + "MassiveIntentClassification (ell-Grek)": 60.45, + "MassiveIntentClassification (hye-Armn)": 43.12, + "MassiveIntentClassification (ara-Arab)": 54.46, + "MassiveIntentClassification (fra-Latn)": 67.69, + "MassiveIntentClassification (mon-Cyrl)": 40.84, + "MassiveIntentClassification (msa-Latn)": 62.61, + "MassiveIntentClassification (nob-Latn)": 63.58, + "MassiveIntentClassification (ben-Beng)": 52.6, + "MassiveIntentClassification (cmo-Hant)": 62.06, + "MassiveIntentClassification (ron-Latn)": 62.45, + "MassiveIntentClassification (swe-Latn)": 67.73, + "MassiveIntentClassification (swa-Latn)": 50.1, + "MassiveScenarioClassification (cmo-Hant)": 67.7, + "MassiveScenarioClassification (kat-Geor)": 49.31, + "MassiveScenarioClassification (ind-Latn)": 72.36, + "MassiveScenarioClassification (amh-Ethi)": 42.0, + "MassiveScenarioClassification (ita-Latn)": 71.86, + "MassiveScenarioClassification (tur-Latn)": 68.71, + "MassiveScenarioClassification (tel-Telu)": 50.8, + "MassiveScenarioClassification (ell-Grek)": 67.42, + "MassiveScenarioClassification (deu-Latn)": 73.64, + "MassiveScenarioClassification (sqi-Latn)": 57.5, + "MassiveScenarioClassification (cym-Latn)": 57.36, + "MassiveScenarioClassification (spa-Latn)": 71.12, + "MassiveScenarioClassification (nld-Latn)": 72.47, + "MassiveScenarioClassification (swa-Latn)": 58.93, + "MassiveScenarioClassification (cmo-Hans)": 71.91, + "MassiveScenarioClassification (fin-Latn)": 65.91, + "MassiveScenarioClassification (por-Latn)": 70.99, + "MassiveScenarioClassification (hun-Latn)": 69.68, + "MassiveScenarioClassification (slv-Latn)": 70.25, + "MassiveScenarioClassification (urd-Arab)": 62.48, + "MassiveScenarioClassification (hye-Armn)": 49.32, + "MassiveScenarioClassification (pol-Latn)": 71.86, + "MassiveScenarioClassification (khm-Khmr)": 45.52, + "MassiveScenarioClassification (kan-Knda)": 49.51, + "MassiveScenarioClassification (hin-Deva)": 66.18, + "MassiveScenarioClassification (heb-Hebr)": 63.3, + "MassiveScenarioClassification (rus-Cyrl)": 73.87, + "MassiveScenarioClassification (mal-Mlym)": 48.53, + "MassiveScenarioClassification (afr-Latn)": 67.34, + "MassiveScenarioClassification (vie-Latn)": 69.19, + "MassiveScenarioClassification (fra-Latn)": 70.79, + "MassiveScenarioClassification (ben-Beng)": 58.75, + "MassiveScenarioClassification (lav-Latn)": 57.3, + "MassiveScenarioClassification (tam-Taml)": 50.9, + "MassiveScenarioClassification (en)": 73.87, + "MassiveScenarioClassification (aze-Latn)": 61.74, + "MassiveScenarioClassification (swe-Latn)": 73.24, + "MassiveScenarioClassification (kor-Kore)": 70.76, + "MassiveScenarioClassification (ron-Latn)": 68.54, + "MassiveScenarioClassification (msa-Latn)": 69.72, + "MassiveScenarioClassification (mya-Mymr)": 44.25, + "MassiveScenarioClassification (fas-Arab)": 70.5, + "MassiveScenarioClassification (tha-Thai)": 64.51, + "MassiveScenarioClassification (jpn-Jpan)": 72.81, + "MassiveScenarioClassification (nob-Latn)": 69.75, + "MassiveScenarioClassification (tgl-Latn)": 69.0, + "MassiveScenarioClassification (dan-Latn)": 71.51, + "MassiveScenarioClassification (ara-Arab)": 61.51, + 
"MassiveScenarioClassification (jav-Latn)": 58.24, + "MassiveScenarioClassification (isl-Latn)": 61.61, + "MassiveScenarioClassification (mon-Cyrl)": 46.6, + "MultilingualSentiment (cmn-Hans)": 68.13, + "NoRecClassification (nob-Latn)": 52.05, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 63.6, + "OnlineShopping (cmn-Hans)": 86.99, + "PAC (pol-Latn)": 68.09, + "PolEmo2.0-IN (pol-Latn)": 66.07, + "PolEmo2.0-OUT (pol-Latn)": 32.94, + "RuReviewsClassification (rus-Cyrl)": 61.42, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.93, + "RuSciBenchOECDClassification (rus-Cyrl)": 45.83, + "TNews (cmn-Hans)": 49.94, + "ToxicConversationsClassification": 63.9, + "TweetSentimentExtractionClassification": 57.14, + "Waimai (cmn-Hans)": 84.92 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "GritLM-7B", + "GeoreviewClusteringP2P (rus-Cyrl)": 74.06, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 45.2, + "MasakhaNEWSClusteringP2P (eng)": 70.5, + "MasakhaNEWSClusteringP2P (fra-Latn)": 73.54, + "MasakhaNEWSClusteringP2P (hau-Latn)": 51.33, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 66.75, + "MasakhaNEWSClusteringP2P (lin-Latn)": 59.57, + "MasakhaNEWSClusteringP2P (lug-Latn)": 58.93, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 54.38, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 92.67, + "MasakhaNEWSClusteringP2P (run-Latn)": 59.51, + "MasakhaNEWSClusteringP2P (sna-Latn)": 68.86, + "MasakhaNEWSClusteringP2P (som-Latn)": 41.42, + "MasakhaNEWSClusteringP2P (swa-Latn)": 33.61, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.68, + "MasakhaNEWSClusteringP2P (xho-Latn)": 46.65, + "MasakhaNEWSClusteringP2P (yor-Latn)": 52.39, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 43.39, + "MasakhaNEWSClusteringS2S (eng)": 65.85, + "MasakhaNEWSClusteringS2S (fra-Latn)": 68.87, + "MasakhaNEWSClusteringS2S (hau-Latn)": 33.02, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 64.55, + "MasakhaNEWSClusteringS2S (lin-Latn)": 72.01, + "MasakhaNEWSClusteringS2S (lug-Latn)": 47.42, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 32.59, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 97.82, + "MasakhaNEWSClusteringS2S (run-Latn)": 59.41, + "MasakhaNEWSClusteringS2S (sna-Latn)": 71.58, + "MasakhaNEWSClusteringS2S (som-Latn)": 40.91, + "MasakhaNEWSClusteringS2S (swa-Latn)": 33.54, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 45.32, + "MasakhaNEWSClusteringS2S (xho-Latn)": 28.94, + "MasakhaNEWSClusteringS2S (yor-Latn)": 63.26, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 60.01, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 51.66 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "GritLM-7B", + "CDSC-E (pol-Latn)": 72.65, + "OpusparcusPC (deu-Latn)": 96.65, + "OpusparcusPC (en)": 98.57, + "OpusparcusPC (fin-Latn)": 90.41, + "OpusparcusPC (fra-Latn)": 93.41, + "OpusparcusPC (rus-Cyrl)": 88.63, + "OpusparcusPC (swe-Latn)": 94.04, + "PSC (pol-Latn)": 99.43, + "PawsXPairClassification (deu-Latn)": 58.5, + "PawsXPairClassification (en)": 63.78, + "PawsXPairClassification (spa-Latn)": 59.15, + "PawsXPairClassification (fra-Latn)": 61.89, + "PawsXPairClassification (jpn-Hira)": 51.46, + "PawsXPairClassification (kor-Hang)": 52.15, + "PawsXPairClassification (cmn-Hans)": 57.66, + "SICK-E-PL (pol-Latn)": 75.98, + "SprintDuplicateQuestions": 93.06, + "TERRa (rus-Cyrl)": 59.39, + "TwitterSemEval2015": 71.24, + "TwitterURLCorpus": 84.54 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "GritLM-7B", + "AlloprofReranking (fra-Latn)": 77.95, + "AskUbuntuDupQuestions": 61.11, + "MMarcoReranking (cmn-Hans)": 21.7, + 
"MindSmallReranking": 31.53, + "RuBQReranking (rus-Cyrl)": 72.41, + "SciDocsRR": 84.78, + "StackOverflowDupQuestions": 50.95, + "SyntecReranking (fra-Latn)": 83.32, + "T2Reranking (cmn-Hans)": 65.63 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "GritLM-7B", + "AILACasedocs": 35.31, + "AILAStatutes": 41.8, + "ARCChallenge": 26.68, + "AlloprofRetrieval (fra-Latn)": 55.42, + "AlphaNLI": 34.0, + "ArguAna": 63.17, + "ArguAna-PL (pol-Latn)": 48.89, + "BSARDRetrieval (fra-Latn)": 26.63, + "BrightRetrieval (pony)": 21.98, + "BrightRetrieval (robotics)": 17.31, + "BrightRetrieval (economics)": 19.0, + "BrightRetrieval (theoremqa_questions)": 23.34, + "BrightRetrieval (leetcode)": 29.85, + "BrightRetrieval (earth_science)": 32.77, + "BrightRetrieval (stackoverflow)": 11.62, + "BrightRetrieval (sustainable_living)": 18.04, + "BrightRetrieval (biology)": 25.04, + "BrightRetrieval (psychology)": 19.92, + "BrightRetrieval (theoremqa_theorems)": 17.41, + "BrightRetrieval (aops)": 8.91, + "CmedqaRetrieval (cmn-Hans)": 35.58, + "CovidRetrieval (cmn-Hans)": 73.47, + "DuRetrieval (cmn-Hans)": 88.18, + "EcomRetrieval (cmn-Hans)": 54.33, + "FiQA-PL (pol-Latn)": 38.04, + "FiQA2018": 59.91, + "GerDaLIRSmall (deu-Latn)": 20.61, + "HellaSwag": 39.45, + "LEMBNarrativeQARetrieval": 41.46, + "LEMBNeedleRetrieval": 33.25, + "LEMBPasskeyRetrieval": 38.25, + "LEMBQMSumRetrieval": 30.32, + "LEMBSummScreenFDRetrieval": 78.49, + "LEMBWikimQARetrieval": 60.8, + "LeCaRDv2 (zho-Hans)": 64.05, + "LegalBenchConsumerContractsQA": 82.1, + "LegalBenchCorporateLobbying": 95.0, + "LegalQuAD (deu-Latn)": 44.18, + "LegalSummarization": 70.64, + "MMarcoRetrieval (cmn-Hans)": 76.54, + "MedicalRetrieval (cmn-Hans)": 55.81, + "MintakaRetrieval (ara-Arab)": 25.88, + "MintakaRetrieval (deu-Latn)": 55.66, + "MintakaRetrieval (spa-Latn)": 53.36, + "MintakaRetrieval (fra-Latn)": 51.68, + "MintakaRetrieval (hin-Deva)": 26.06, + "MintakaRetrieval (ita-Latn)": 54.91, + "MintakaRetrieval (jpn-Hira)": 34.1, + "MintakaRetrieval (por-Latn)": 54.91, + "NFCorpus": 40.86, + "NFCorpus-PL (pol-Latn)": 32.88, + "PIQA": 44.35, + "Quail": 11.69, + "RARbCode": 84.0, + "RARbMath": 82.35, + "RuBQRetrieval (rus-Cyrl)": 70.94, + "SCIDOCS": 24.4, + "SCIDOCS-PL (pol-Latn)": 18.39, + "SIQA": 7.23, + "SciFact": 79.13, + "SciFact-PL (pol-Latn)": 73.22, + "SpartQA": 9.29, + "SyntecRetrieval (fra-Latn)": 89.48, + "T2Retrieval (cmn-Hans)": 82.96, + "TRECCOVID": 74.36, + "TRECCOVID-PL (pol-Latn)": 58.01, + "TempReasonL1": 7.15, + "TempReasonL2Fact": 58.38, + "TempReasonL2Pure": 11.22, + "TempReasonL3Fact": 44.29, + "TempReasonL3Pure": 14.15, + "Touche2020": 27.81, + "VideoRetrieval (cmn-Hans)": 53.85, + "WinoGrande": 53.74, + "XPQARetrieval (ara-Arab_ara-Arab)": 45.21, + "XPQARetrieval (eng-Latn_ara-Arab)": 27.32, + "XPQARetrieval (ara-Arab_eng-Latn)": 39.43, + "XPQARetrieval (deu-Latn_deu-Latn)": 76.58, + "XPQARetrieval (eng-Latn_deu-Latn)": 55.44, + "XPQARetrieval (deu-Latn_eng-Latn)": 72.56, + "XPQARetrieval (spa-Latn_spa-Latn)": 64.55, + "XPQARetrieval (eng-Latn_spa-Latn)": 45.49, + "XPQARetrieval (spa-Latn_eng-Latn)": 61.03, + "XPQARetrieval (fra-Latn_fra-Latn)": 70.85, + "XPQARetrieval (eng-Latn_fra-Latn)": 48.14, + "XPQARetrieval (fra-Latn_eng-Latn)": 66.96, + "XPQARetrieval (hin-Deva_hin-Deva)": 74.75, + "XPQARetrieval (eng-Latn_hin-Deva)": 25.61, + "XPQARetrieval (hin-Deva_eng-Latn)": 63.9, + "XPQARetrieval (ita-Latn_ita-Latn)": 76.53, + "XPQARetrieval (eng-Latn_ita-Latn)": 46.88, + "XPQARetrieval (ita-Latn_eng-Latn)": 71.03, + "XPQARetrieval 
(jpn-Hira_jpn-Hira)": 72.27, + "XPQARetrieval (eng-Latn_jpn-Hira)": 41.94, + "XPQARetrieval (jpn-Hira_eng-Latn)": 69.42, + "XPQARetrieval (kor-Hang_kor-Hang)": 40.64, + "XPQARetrieval (eng-Latn_kor-Hang)": 32.68, + "XPQARetrieval (kor-Hang_eng-Latn)": 36.0, + "XPQARetrieval (pol-Latn_pol-Latn)": 50.74, + "XPQARetrieval (eng-Latn_pol-Latn)": 33.14, + "XPQARetrieval (pol-Latn_eng-Latn)": 48.06, + "XPQARetrieval (por-Latn_por-Latn)": 49.86, + "XPQARetrieval (eng-Latn_por-Latn)": 33.01, + "XPQARetrieval (por-Latn_eng-Latn)": 48.45, + "XPQARetrieval (tam-Taml_tam-Taml)": 41.78, + "XPQARetrieval (eng-Latn_tam-Taml)": 10.95, + "XPQARetrieval (tam-Taml_eng-Latn)": 21.28, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 65.29, + "XPQARetrieval (eng-Latn_cmn-Hans)": 35.86, + "XPQARetrieval (cmn-Hans_eng-Latn)": 58.12 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "GritLM-7B", + "AFQMC (cmn-Hans)": 32.65, + "ATEC (cmn-Hans)": 37.34, + "BIOSSES": 85.01, + "BQ (cmn-Hans)": 38.03, + "CDSC-R (pol-Latn)": 92.23, + "LCQMC (cmn-Hans)": 71.38, + "PAWSX (cmn-Hans)": 16.4, + "RUParaPhraserSTS (rus-Cyrl)": 71.08, + "RuSTSBenchmarkSTS (rus-Cyrl)": 76.26, + "SICK-R": 81.47, + "SICK-R-PL (pol-Latn)": 72.78, + "SICKFr (fra-Latn)": 76.91, + "STS12": 65.84, + "STS13": 78.37, + "STS14": 77.52, + "STS15": 85.43, + "STS16": 79.94, + "STS17 (ita-Latn_eng-Latn)": 88.42, + "STS17 (fra-Latn_eng-Latn)": 87.9, + "STS17 (kor-Hang)": 78.74, + "STS17 (en-en)": 90.12, + "STS17 (nld-Latn_eng-Latn)": 88.29, + "STS17 (ara-Arab)": 79.28, + "STS17 (eng-Latn_deu-Latn)": 88.92, + "STS17 (spa-Latn)": 87.12, + "STS17 (eng-Latn_tur-Latn)": 77.47, + "STS17 (spa-Latn_eng-Latn)": 87.47, + "STS17 (eng-Latn_ara-Arab)": 74.45, + "STS22 (spa-Latn_eng-Latn)": 80.76, + "STS22 (ara-Arab)": 55.45, + "STS22 (pol-Latn_eng-Latn)": 77.77, + "STS22 (deu-Latn_pol-Latn)": 55.09, + "STS22 (en)": 68.59, + "STS22 (rus-Cyrl)": 68.46, + "STS22 (deu-Latn_eng-Latn)": 62.33, + "STS22 (cmn-Hans)": 72.29, + "STS22 (pol-Latn)": 48.07, + "STS22 (fra-Latn)": 83.09, + "STS22 (cmn-Hans_eng-Latn)": 72.73, + "STS22 (deu-Latn_fra-Latn)": 62.14, + "STS22 (spa-Latn_ita-Latn)": 77.63, + "STS22 (fra-Latn_pol-Latn)": 84.52, + "STS22 (ita-Latn)": 77.58, + "STS22 (spa-Latn)": 72.24, + "STS22 (deu-Latn)": 59.34, + "STS22 (tur-Latn)": 70.83, + "STSB (cmn-Hans)": 74.11, + "STSBenchmark": 83.1, + "STSBenchmarkMultilingualSTS (spa-Latn)": 79.51, + "STSBenchmarkMultilingualSTS (ita-Latn)": 76.24, + "STSBenchmarkMultilingualSTS (por-Latn)": 76.61, + "STSBenchmarkMultilingualSTS (fra-Latn)": 77.48, + "STSBenchmarkMultilingualSTS (deu-Latn)": 77.57, + "STSBenchmarkMultilingualSTS (en)": 83.12, + "STSBenchmarkMultilingualSTS (nld-Latn)": 74.83, + "STSBenchmarkMultilingualSTS (pol-Latn)": 74.67, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 75.27, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 76.19 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "GritLM-7B", + "SummEval": 30.26, + "SummEvalFr (fra-Latn)": 29.97 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "GritLM-7B", + "Core17InstructionRetrieval": 2.62, + "News21InstructionRetrieval": -1.01, + "Robust04InstructionRetrieval": -1.68 + } + ] + } + }, + "monobert-large-msmarco": { + "BitextMining": { + "f1": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": 
"monobert-large-msmarco" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "monobert-large-msmarco" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "monobert-large-msmarco", + "Core17InstructionRetrieval": -0.24, + "News21InstructionRetrieval": -0.8, + "Robust04InstructionRetrieval": -9.36 + } + ] + } + }, + "bge-large-en-v1.5": { + "BitextMining": { + "f1": [ + { + "Model": "bge-large-en-v1.5" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-large-en-v1.5" } ] }, "Clustering": { "v_measure": [ { - "Model": "LaBSE", - "8TagsClustering": 12.96, - "AlloProfClusteringP2P": 54.78, - "AlloProfClusteringS2S": 31.6, - "ArxivClusteringP2P": 32.13, - "ArxivClusteringS2S": 22.05, - "BiorxivClusteringP2P": 29.84, - "BiorxivClusteringS2S": 20.57, - "GeoreviewClusteringP2P (rus-Cyrl)": 52.19, - "HALClusteringS2S": 20.62, - "MLSUMClusteringP2P (rus-Cyrl)": 39.45, - "MLSUMClusteringP2P": 42.09, - "MLSUMClusteringS2S (rus-Cyrl)": 35.77, - "MLSUMClusteringS2S": 34.84, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.78, - "MasakhaNEWSClusteringP2P (eng)": 48.16, - "MasakhaNEWSClusteringP2P (fra-Latn)": 46.16, - "MasakhaNEWSClusteringP2P (hau-Latn)": 39.77, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 62.67, - "MasakhaNEWSClusteringP2P (lin-Latn)": 62.98, - "MasakhaNEWSClusteringP2P (lug-Latn)": 47.76, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.76, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 77.16, - "MasakhaNEWSClusteringP2P (run-Latn)": 60.36, - "MasakhaNEWSClusteringP2P (sna-Latn)": 63.57, - "MasakhaNEWSClusteringP2P (som-Latn)": 34.94, - "MasakhaNEWSClusteringP2P (swa-Latn)": 27.26, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.59, - "MasakhaNEWSClusteringP2P (xho-Latn)": 45.32, - "MasakhaNEWSClusteringP2P (yor-Latn)": 48.73, - "MasakhaNEWSClusteringP2P (fra)": 46.16, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 52.73, - "MasakhaNEWSClusteringS2S (eng)": 32.6, - "MasakhaNEWSClusteringS2S (fra-Latn)": 38.13, - "MasakhaNEWSClusteringS2S (hau-Latn)": 31.62, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 32.27, - "MasakhaNEWSClusteringS2S (lin-Latn)": 49.38, - "MasakhaNEWSClusteringS2S (lug-Latn)": 47.63, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 25.05, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.18, - "MasakhaNEWSClusteringS2S (run-Latn)": 52.39, - "MasakhaNEWSClusteringS2S (sna-Latn)": 46.9, - "MasakhaNEWSClusteringS2S (som-Latn)": 24.08, - "MasakhaNEWSClusteringS2S (swa-Latn)": 15.83, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 49.07, - "MasakhaNEWSClusteringS2S (xho-Latn)": 28.52, - "MasakhaNEWSClusteringS2S (yor-Latn)": 32.26, - "MasakhaNEWSClusteringS2S (fra)": 38.13, - "MedrxivClusteringP2P": 30.13, - "MedrxivClusteringS2S": 24.82, - "RedditClustering": 28.79, - "RedditClusteringP2P": 49.14, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 49.09, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.97, - "StackExchangeClustering": 35.43, - "StackExchangeClusteringP2P": 28.83, - "TwentyNewsgroupsClustering": 23.28 + "Model": "bge-large-en-v1.5" } ] }, "PairClassification": { "ap": [ { - "Model": "LaBSE", - "CDSC-E (pol-Latn)": 68.92, - "CDSC-E": 68.91, - "OpusparcusPC (deu-Latn)": 96.58, - "OpusparcusPC (en)": 98.12, - "OpusparcusPC (fin-Latn)": 94.44, - "OpusparcusPC (fra-Latn)": 93.96, - "OpusparcusPC (rus-Cyrl)": 87.3, - 
"OpusparcusPC (swe-Latn)": 93.69, - "OpusparcusPC (fr)": 93.96, - "PPC": 86.97, - "PSC (pol-Latn)": 97.42, - "PSC": 97.42, - "PawsXPairClassification (deu-Latn)": 51.07, - "PawsXPairClassification (en)": 54.07, - "PawsXPairClassification (spa-Latn)": 52.19, - "PawsXPairClassification (fra-Latn)": 54.63, - "PawsXPairClassification (jpn-Hira)": 47.56, - "PawsXPairClassification (kor-Hang)": 49.39, - "PawsXPairClassification (cmn-Hans)": 54.26, - "PawsXPairClassification (fr)": 54.63, - "SICK-E-PL (pol-Latn)": 63.77, - "SICK-E-PL": 63.77, - "SprintDuplicateQuestions": 89.26, - "TERRa (rus-Cyrl)": 55.71, - "TwitterSemEval2015": 62.78, - "TwitterURLCorpus": 84.58 + "Model": "bge-large-en-v1.5" } ] }, "Reranking": { "map": [ { - "Model": "LaBSE", - "AlloprofReranking (fra-Latn)": 55.37, - "AlloprofReranking": 49.51, - "AskUbuntuDupQuestions": 52.75, - "MMarcoReranking (cmn-Hans)": 14.83, - "MindSmallReranking": 29.81, - "RuBQReranking (rus-Cyrl)": 55.13, - "SciDocsRR": 68.72, - "StackOverflowDupQuestions": 42.42, - "SyntecReranking (fra-Latn)": 67.62, - "SyntecReranking": 73.28, - "T2Reranking (cmn-Hans)": 63.29 + "Model": "bge-large-en-v1.5" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "LaBSE", - "AILACasedocs": 17.67, - "AILAStatutes": 16.72, - "ARCChallenge": 3.78, - "AlloprofRetrieval (fra-Latn)": 19.77, - "AlloprofRetrieval": 19.77, - "AlphaNLI": 13.11, - "ArguAna": 34.18, - "ArguAna-PL (pol-Latn)": 38.56, - "ArguAna-PL": 38.52, - "BSARDRetrieval (fra-Latn)": 4.44, - "BSARDRetrieval": 0.0, - "CQADupstackRetrieval": 18.75, - "ClimateFEVER": 3.83, - "CmedqaRetrieval (cmn-Hans)": 5.49, - "CovidRetrieval (cmn-Hans)": 28.6, - "DBPedia": 15.57, - "DBPedia-PL": 16.1, - "DuRetrieval (cmn-Hans)": 26.34, - "EcomRetrieval (cmn-Hans)": 25.42, - "FEVER": 12.18, - "FiQA-PL (pol-Latn)": 7.66, - "FiQA-PL": 7.63, - "FiQA2018": 7.0, - "GerDaLIRSmall (deu-Latn)": 4.59, - "HellaSwag": 5.59, - "HotpotQA": 18.75, - "HotpotQA-PL": 19.72, - "LEMBNarrativeQARetrieval": 11.45, - "LEMBNeedleRetrieval": 17.5, - "LEMBPasskeyRetrieval": 20.25, - "LEMBQMSumRetrieval": 14.07, - "LEMBSummScreenFDRetrieval": 40.52, - "LEMBWikimQARetrieval": 28.1, - "LeCaRDv2 (zho-Hans)": 24.68, - "LegalBenchConsumerContractsQA": 54.66, - "LegalBenchCorporateLobbying": 69.39, - "LegalQuAD (deu-Latn)": 16.64, - "LegalSummarization": 53.89, - "MMarcoRetrieval (cmn-Hans)": 34.78, - "MSMARCO": 7.6, - "MSMARCO-PL": 7.22, - "MedicalRetrieval (cmn-Hans)": 6.68, - "MintakaRetrieval (ara-Arab)": 14.06, - "MintakaRetrieval (deu-Latn)": 15.26, - "MintakaRetrieval (spa-Latn)": 15.65, - "MintakaRetrieval (fra-Latn)": 15.53, - "MintakaRetrieval (hin-Deva)": 13.67, - "MintakaRetrieval (ita-Latn)": 15.94, - "MintakaRetrieval (jpn-Hira)": 12.8, - "MintakaRetrieval (por-Latn)": 15.03, - "MintakaRetrieval (fr)": 15.53, - "NFCorpus": 16.54, - "NFCorpus-PL (pol-Latn)": 17.45, - "NFCorpus-PL": 17.45, - "NQ": 8.42, - "NQ-PL": 9.65, - "PIQA": 6.53, - "Quail": 1.91, - "Quora-PL": 74.96, - "QuoraRetrieval": 77.03, - "RARbCode": 2.31, - "RARbMath": 27.19, - "RiaNewsRetrieval (rus-Cyrl)": 42.75, - "RuBQRetrieval (rus-Cyrl)": 30.02, - "SCIDOCS": 5.63, - "SCIDOCS-PL (pol-Latn)": 7.47, - "SCIDOCS-PL": 7.48, - "SIQA": 1.07, - "SciFact": 38.2, - "SciFact-PL (pol-Latn)": 39.79, - "SciFact-PL": 39.79, - "SpartQA": 1.56, - "SyntecRetrieval (fra-Latn)": 55.31, - "SyntecRetrieval": 55.31, - "T2Retrieval (cmn-Hans)": 25.32, - "TRECCOVID": 16.34, - "TRECCOVID-PL (pol-Latn)": 18.51, - "TRECCOVID-PL": 18.45, - "TempReasonL1": 1.56, - "TempReasonL2Fact": 7.06, - 
"TempReasonL2Pure": 0.14, - "TempReasonL3Fact": 8.74, - "TempReasonL3Pure": 4.73, - "Touche2020": 4.88, - "VideoRetrieval (cmn-Hans)": 22.04, - "WinoGrande": 54.3, - "XPQARetrieval (ara-Arab_ara-Arab)": 35.19, - "XPQARetrieval (eng-Latn_ara-Arab)": 20.64, - "XPQARetrieval (ara-Arab_eng-Latn)": 32.47, - "XPQARetrieval (deu-Latn_deu-Latn)": 53.56, - "XPQARetrieval (eng-Latn_deu-Latn)": 24.31, - "XPQARetrieval (deu-Latn_eng-Latn)": 54.87, - "XPQARetrieval (spa-Latn_spa-Latn)": 44.49, - "XPQARetrieval (eng-Latn_spa-Latn)": 25.31, - "XPQARetrieval (spa-Latn_eng-Latn)": 43.4, - "XPQARetrieval (fra-Latn_fra-Latn)": 51.74, - "XPQARetrieval (eng-Latn_fra-Latn)": 21.29, - "XPQARetrieval (fra-Latn_eng-Latn)": 49.4, - "XPQARetrieval (hin-Deva_hin-Deva)": 66.64, - "XPQARetrieval (eng-Latn_hin-Deva)": 23.25, - "XPQARetrieval (hin-Deva_eng-Latn)": 64.54, - "XPQARetrieval (ita-Latn_ita-Latn)": 56.27, - "XPQARetrieval (eng-Latn_ita-Latn)": 25.8, - "XPQARetrieval (ita-Latn_eng-Latn)": 52.69, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 58.6, - "XPQARetrieval (eng-Latn_jpn-Hira)": 21.49, - "XPQARetrieval (jpn-Hira_eng-Latn)": 52.41, - "XPQARetrieval (kor-Hang_kor-Hang)": 27.66, - "XPQARetrieval (eng-Latn_kor-Hang)": 23.33, - "XPQARetrieval (kor-Hang_eng-Latn)": 23.96, - "XPQARetrieval (pol-Latn_pol-Latn)": 37.33, - "XPQARetrieval (eng-Latn_pol-Latn)": 16.19, - "XPQARetrieval (pol-Latn_eng-Latn)": 37.7, - "XPQARetrieval (por-Latn_por-Latn)": 38.49, - "XPQARetrieval (eng-Latn_por-Latn)": 19.41, - "XPQARetrieval (por-Latn_eng-Latn)": 37.33, - "XPQARetrieval (tam-Taml_tam-Taml)": 37.32, - "XPQARetrieval (eng-Latn_tam-Taml)": 20.53, - "XPQARetrieval (tam-Taml_eng-Latn)": 30.14, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 50.7, - "XPQARetrieval (eng-Latn_cmn-Hans)": 20.59, - "XPQARetrieval (cmn-Hans_eng-Latn)": 48.23, - "XPQARetrieval (fr)": 51.74 + "Model": "bge-large-en-v1.5", + "AILACasedocs": 25.15, + "AILAStatutes": 20.74, + "ARCChallenge": 9.99, + "AlphaNLI": 13.13, + "BrightRetrieval (stackoverflow)": 9.51, + "BrightRetrieval (earth_science)": 24.15, + "BrightRetrieval (aops)": 6.08, + "BrightRetrieval (sustainable_living)": 13.27, + "BrightRetrieval (psychology)": 17.44, + "BrightRetrieval (robotics)": 12.21, + "BrightRetrieval (theoremqa_theorems)": 5.51, + "BrightRetrieval (pony)": 5.64, + "BrightRetrieval (biology)": 11.96, + "BrightRetrieval (theoremqa_questions)": 12.56, + "BrightRetrieval (leetcode)": 26.68, + "BrightRetrieval (economics)": 16.59, + "GerDaLIRSmall": 3.96, + "HellaSwag": 28.5, + "LeCaRDv2": 22.68, + "LegalBenchConsumerContractsQA": 73.52, + "LegalBenchCorporateLobbying": 91.51, + "LegalQuAD": 16.22, + "LegalSummarization": 59.99, + "PIQA": 27.99, + "Quail": 1.83, + "RARbCode": 48.12, + "RARbMath": 57.36, + "SIQA": 1.04, + "SpartQA": 2.99, + "TempReasonL1": 1.46, + "TempReasonL2Fact": 24.25, + "TempReasonL2Pure": 2.35, + "TempReasonL3Fact": 20.64, + "TempReasonL3Pure": 6.67, + "WinoGrande": 19.18 } ] }, "STS": { "spearman": [ { - "Model": "LaBSE", - "AFQMC (cmn-Hans)": 21.02, - "ATEC (cmn-Hans)": 26.61, - "BIOSSES": 78.7, - "BQ (cmn-Hans)": 42.6, - "CDSC-R (pol-Latn)": 85.53, - "CDSC-R": 85.53, - "LCQMC (cmn-Hans)": 52.19, - "PAWSX (cmn-Hans)": 10.23, - "RUParaPhraserSTS (rus-Cyrl)": 65.74, - "RuSTSBenchmarkSTS (rus-Cyrl)": 73.34, - "SICK-R": 69.99, - "SICK-R-PL (pol-Latn)": 65.9, - "SICK-R-PL": 65.9, - "SICKFr (fra-Latn)": 69.94, - "SICKFr": 69.94, - "STS12": 65.08, - "STS13": 67.98, - "STS14": 64.03, - "STS15": 76.59, - "STS16": 72.98, - "STS17 (nld-Latn_eng-Latn)": 75.22, - "STS17 
(eng-Latn_tur-Latn)": 72.07, - "STS17 (spa-Latn)": 80.83, - "STS17 (kor-Hang)": 71.32, - "STS17 (eng-Latn_deu-Latn)": 73.85, - "STS17 (ita-Latn_eng-Latn)": 76.99, - "STS17 (eng-Latn_ara-Arab)": 74.51, - "STS17 (ara-Arab)": 69.07, - "STS17 (fra-Latn_eng-Latn)": 76.98, - "STS17 (spa-Latn_eng-Latn)": 65.71, - "STS17 (en-en)": 79.45, - "STS17 (ar-ar)": 69.07, - "STS17 (en-ar)": 74.51, - "STS17 (en-de)": 73.85, - "STS17 (en-tr)": 72.07, - "STS17 (es-en)": 65.71, - "STS17 (es-es)": 80.83, - "STS17 (fr-en)": 76.98, - "STS17 (it-en)": 76.99, - "STS17 (ko-ko)": 71.32, - "STS17 (nl-en)": 75.22, - "STS22 (cmn-Hans)": 63.02, - "STS22 (spa-Latn)": 63.18, - "STS22 (en)": 60.97, - "STS22 (spa-Latn_ita-Latn)": 69.69, - "STS22 (deu-Latn)": 48.58, - "STS22 (fra-Latn)": 77.95, - "STS22 (ara-Arab)": 57.67, - "STS22 (spa-Latn_eng-Latn)": 71.86, - "STS22 (pol-Latn_eng-Latn)": 69.41, - "STS22 (ita-Latn)": 72.22, - "STS22 (pol-Latn)": 39.3, - "STS22 (deu-Latn_fra-Latn)": 53.28, - "STS22 (deu-Latn_pol-Latn)": 58.69, - "STS22 (fra-Latn_pol-Latn)": 61.98, - "STS22 (cmn-Hans_eng-Latn)": 64.02, - "STS22 (tur-Latn)": 58.15, - "STS22 (deu-Latn_eng-Latn)": 50.14, - "STS22 (rus-Cyrl)": 57.49, - "STS22 (ar)": 57.67, - "STS22 (de)": 48.58, - "STS22 (de-en)": 50.14, - "STS22 (de-fr)": 53.28, - "STS22 (de-pl)": 58.69, - "STS22 (es)": 63.18, - "STS22 (es-en)": 71.86, - "STS22 (es-it)": 69.69, - "STS22 (fr)": 77.95, - "STS22 (fr-pl)": 61.98, - "STS22 (it)": 72.22, - "STS22 (pl)": 39.28, - "STS22 (pl-en)": 69.41, - "STS22 (ru)": 57.49, - "STS22 (tr)": 58.15, - "STS22 (zh)": 63.02, - "STS22 (zh-en)": 64.02, - "STSB (cmn-Hans)": 68.38, - "STSBenchmark": 72.25, - "STSBenchmarkMultilingualSTS (en)": 72.25, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.06, - "STSBenchmarkMultilingualSTS (fra-Latn)": 75.1, - "STSBenchmarkMultilingualSTS (spa-Latn)": 72.92, - "STSBenchmarkMultilingualSTS (nld-Latn)": 70.22, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 69.5, - "STSBenchmarkMultilingualSTS (ita-Latn)": 72.97, - "STSBenchmarkMultilingualSTS (por-Latn)": 71.65, - "STSBenchmarkMultilingualSTS (deu-Latn)": 72.43, - "STSBenchmarkMultilingualSTS (pol-Latn)": 72.58, - "STSBenchmarkMultilingualSTS (fr)": 75.1 + "Model": "bge-large-en-v1.5" } ] }, "Summarization": { "spearman": [ { - "Model": "LaBSE", - "SummEval": 31.05, - "SummEvalFr (fra-Latn)": 30.16, - "SummEvalFr": 30.16 + "Model": "bge-large-en-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "LaBSE" + "Model": "bge-large-en-v1.5" } ] } }, - "sentence-camembert-large": { + "SFR-Embedding-Mistral": { "BitextMining": { "f1": [ { - "Model": "sentence-camembert-large" + "Model": "SFR-Embedding-Mistral" } ] }, "Classification": { "accuracy": [ { - "Model": "sentence-camembert-large", - "AmazonReviewsClassification (fr)": 37.97, - "MTOPDomainClassification (fr)": 85.74, - "MTOPIntentClassification (fr)": 58.62, - "MasakhaNEWSClassification (fra)": 80.62, - "MassiveIntentClassification (fr)": 62.65, - "MassiveScenarioClassification (fr)": 69.29 + "Model": "SFR-Embedding-Mistral" } ] }, "Clustering": { "v_measure": [ { - "Model": "sentence-camembert-large", - "AlloProfClusteringP2P": 62.69, - "AlloProfClusteringS2S": 42.06, - "HALClusteringS2S": 23.9, - "MLSUMClusteringP2P": 42.04, - "MLSUMClusteringS2S": 32.29, - "MasakhaNEWSClusteringP2P (fra)": 54.51, - "MasakhaNEWSClusteringS2S (fra)": 44.73 + "Model": "SFR-Embedding-Mistral" } ] }, "PairClassification": { "ap": [ { - "Model": "sentence-camembert-large", - "OpusparcusPC (fr)": 94.63, - "PawsXPairClassification (fr)": 59.59 + 
"Model": "SFR-Embedding-Mistral" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "SFR-Embedding-Mistral" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "SFR-Embedding-Mistral", + "BrightRetrieval (sustainable_living)": 19.79, + "BrightRetrieval (economics)": 17.84, + "BrightRetrieval (theoremqa_theorems)": 24.05, + "BrightRetrieval (aops)": 7.43, + "BrightRetrieval (theoremqa_questions)": 23.05, + "BrightRetrieval (psychology)": 18.97, + "BrightRetrieval (stackoverflow)": 12.72, + "BrightRetrieval (pony)": 1.97, + "BrightRetrieval (leetcode)": 27.35, + "BrightRetrieval (biology)": 19.49, + "BrightRetrieval (earth_science)": 26.63, + "BrightRetrieval (robotics)": 16.7 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "SFR-Embedding-Mistral" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "SFR-Embedding-Mistral" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "SFR-Embedding-Mistral" + } + ] + } + }, + "nomic-embed-text-v1.5-512": { + "BitextMining": { + "f1": [ + { + "Model": "nomic-embed-text-v1.5-512" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "nomic-embed-text-v1.5-512", + "AmazonCounterfactualClassification (en)": 74.27, + "AmazonPolarityClassification": 91.89, + "AmazonReviewsClassification (en)": 46.97, + "Banking77Classification": 84.15, + "EmotionClassification": 47.73, + "ImdbClassification": 85.47, + "MTOPDomainClassification (en)": 92.62, + "MTOPIntentClassification (en)": 74.27, + "MassiveIntentClassification (en)": 73.07, + "MassiveScenarioClassification (en)": 76.82, + "ToxicConversationsClassification": 71.25, + "TweetSentimentExtractionClassification": 60.4 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "nomic-embed-text-v1.5-512", + "ArxivClusteringP2P": 45.45, + "ArxivClusteringS2S": 36.19, + "BiorxivClusteringP2P": 38.41, + "BiorxivClusteringS2S": 32.28, + "MedrxivClusteringP2P": 34.47, + "MedrxivClusteringS2S": 31.43, + "RedditClustering": 55.9, + "RedditClusteringP2P": 60.58, + "StackExchangeClustering": 62.94, + "StackExchangeClusteringP2P": 33.81, + "TwentyNewsgroupsClustering": 49.36 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "nomic-embed-text-v1.5-512", + "SprintDuplicateQuestions": 92.91, + "TwitterSemEval2015": 74.3, + "TwitterURLCorpus": 86.57 } ] }, "Reranking": { "map": [ { - "Model": "sentence-camembert-large", - "AlloprofReranking": 57.62, - "SyntecReranking": 88.15 + "Model": "nomic-embed-text-v1.5-512", + "AskUbuntuDupQuestions": 61.6, + "MindSmallReranking": 30.34, + "SciDocsRR": 80.33, + "StackOverflowDupQuestions": 50.32 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "sentence-camembert-large", - "AlloprofRetrieval": 31.62, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 21.87, - "SyntecRetrieval": 81.11, - "XPQARetrieval (fr)": 65.62 + "Model": "nomic-embed-text-v1.5-512", + "ArguAna": 47.45, + "CQADupstackRetrieval": 39.06, + "ClimateFEVER": 40.7, + "DBPedia": 42.96, + "FEVER": 85.7, + "FiQA2018": 36.92, + "HotpotQA": 71.48, + "MSMARCO": 42.29, + "NFCorpus": 33.31, + "NQ": 58.83, + "QuoraRetrieval": 87.87, + "SCIDOCS": 17.88, + "SciFact": 70.12, + "TRECCOVID": 82.12, + "Touche2020": 29.24 } ] }, "STS": { "spearman": [ { - "Model": "sentence-camembert-large", - "SICKFr": 77.7, - "STS22 (fr)": 81.73, - "STSBenchmarkMultilingualSTS (fr)": 85.79 + "Model": "nomic-embed-text-v1.5-512", + "BIOSSES": 83.3, + "SICK-R": 79.27, + "STS12": 78.3, + "STS13": 85.81, + "STS14": 81.38, + "STS15": 86.79, + "STS16": 84.56, + "STS17 
(en-en)": 87.25, + "STS22 (en)": 65.24, + "STSBenchmark": 85.14 } ] }, "Summarization": { "spearman": [ { - "Model": "sentence-camembert-large", - "SummEvalFr": 30.88 + "Model": "nomic-embed-text-v1.5-512", + "SummEval": 30.47 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "sentence-camembert-large" + "Model": "nomic-embed-text-v1.5-512" } ] } }, - "nb-bert-large": { + "st-polish-paraphrase-from-mpnet": { "BitextMining": { "f1": [ { - "Model": "nb-bert-large", - "BornholmBitextMining": 4.53 + "Model": "st-polish-paraphrase-from-mpnet" } ] }, "Classification": { "accuracy": [ { - "Model": "nb-bert-large", - "AngryTweetsClassification": 52.14, - "DKHateClassification": 62.13, - "DanishPoliticalCommentsClassification": 35.04, - "LccSentimentClassification": 56.27, - "MassiveIntentClassification (da)": 57.03, - "MassiveIntentClassification (nb)": 62.68, - "MassiveIntentClassification (sv)": 55.02, - "MassiveScenarioClassification (da)": 60.43, - "MassiveScenarioClassification (nb)": 67.44, - "MassiveScenarioClassification (sv)": 57.12, - "NoRecClassification": 55.46, - "NordicLangClassification": 85.27, - "NorwegianParliament": 62.58, - "ScalaDaClassification": 62.85, - "ScalaNbClassification": 66.97 + "Model": "st-polish-paraphrase-from-mpnet", + "AllegroReviews": 34.55, + "CBD": 67.48, + "MassiveIntentClassification (pl)": 65.93, + "MassiveScenarioClassification (pl)": 71.85, + "PAC": 63.25, + "PolEmo2.0-IN": 68.37, + "PolEmo2.0-OUT": 30.99 } ] }, "Clustering": { "v_measure": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet", + "8TagsClustering": 33.15 } ] }, "PairClassification": { "ap": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet", + "CDSC-E": 75.06, + "PPC": 93.49, + "PSC": 99.05, + "SICK-E-PL": 80.56 } ] }, "Reranking": { "map": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet", + "ArguAna-PL": 51.87, + "DBPedia-PL": 24.59, + "FiQA-PL": 22.27, + "HotpotQA-PL": 32.11, + "MSMARCO-PL": 17.91, + "NFCorpus-PL": 24.05, + "NQ-PL": 23.54, + "Quora-PL": 81.49, + "SCIDOCS-PL": 13.23, + "SciFact-PL": 52.51, + "TRECCOVID-PL": 35.23 } ] }, "STS": { "spearman": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet", + "CDSC-R": 88.55, + "SICK-R-PL": 76.18, + "STS22 (pl)": 37.34 } ] }, "Summarization": { "spearman": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "nb-bert-large" + "Model": "st-polish-paraphrase-from-mpnet" } ] } }, - "bge-small-zh-v1.5": { + "m3e-large": { "BitextMining": { "f1": [ { - "Model": "bge-small-zh-v1.5" + "Model": "m3e-large" } ] }, "Classification": { "accuracy": [ { - "Model": "bge-small-zh-v1.5", - "AmazonReviewsClassification (zh)": 35.91, - "IFlyTek": 45.49, - "JDReview": 80.04, - "MassiveIntentClassification (zh-CN)": 63.95, - "MassiveScenarioClassification (zh-CN)": 70.8, - "MultilingualSentiment": 63.06, - "OnlineShopping": 85.05, - "TNews": 48.15, - "Waimai": 83.18 + "Model": "m3e-large", + "AmazonReviewsClassification (zh)": 44.44, + "IFlyTek": 43.96, + "JDReview": 86.92, + "MassiveIntentClassification (zh-CN)": 67.23, + "MassiveScenarioClassification (zh-CN)": 74.88, + "MultilingualSentiment": 72.47, + "OnlineShopping": 89.59, + "TNews": 48.26, + "Waimai": 86.08 } ] }, "Clustering": { "v_measure": [ { - "Model": "bge-small-zh-v1.5", - "CLSClusteringP2P": 
38.14, - "CLSClusteringS2S": 35.14, - "ThuNewsClusteringP2P": 54.22, - "ThuNewsClusteringS2S": 49.22 + "Model": "m3e-large", + "CLSClusteringP2P": 38.6, + "CLSClusteringS2S": 38.02, + "ThuNewsClusteringP2P": 60.39, + "ThuNewsClusteringS2S": 58.51 } ] }, "PairClassification": { "ap": [ { - "Model": "bge-small-zh-v1.5", - "Cmnli": 76.24, - "Ocnli": 64.57 + "Model": "m3e-large", + "Cmnli": 69.27, + "Ocnli": 59.33 } ] }, "Reranking": { "map": [ { - "Model": "bge-small-zh-v1.5", - "CMedQAv1": 77.4, - "CMedQAv2": 79.86, - "MMarcoReranking": 20.5, - "T2Reranking": 65.9 + "Model": "m3e-large", + "CMedQAv1": 77.76, + "CMedQAv2": 78.27, + "MMarcoReranking": 16.46, + "T2Reranking": 66.13 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bge-small-zh-v1.5", - "CmedqaRetrieval": 35.11, - "CovidRetrieval": 70.14, - "DuRetrieval": 77.28, - "EcomRetrieval": 55.71, - "MMarcoRetrieval": 63.48, - "MedicalRetrieval": 49.8, - "T2Retrieval": 76.43, - "VideoRetrieval": 66.19 + "Model": "m3e-large", + "CmedqaRetrieval": 30.73, + "CovidRetrieval": 61.33, + "DuRetrieval": 74.69, + "EcomRetrieval": 45.18, + "MMarcoRetrieval": 61.06, + "MedicalRetrieval": 48.66, + "T2Retrieval": 72.36, + "VideoRetrieval": 44.02 } ] }, "STS": { "spearman": [ { - "Model": "bge-small-zh-v1.5", - "AFQMC": 33.42, - "ATEC": 43.01, - "BQ": 55.22, - "LCQMC": 72.19, - "PAWSX": 9.26, - "QBQTC": 35.29, - "STS22 (zh)": 67.72, - "STSB": 76.73 + "Model": "m3e-large", + "AFQMC": 36.53, + "ATEC": 41.8, + "BQ": 65.2, + "LCQMC": 74.2, + "PAWSX": 15.95, + "QBQTC": 32.65, + "STS22 (zh)": 62.91, + "STSB": 74.16 } ] }, "Summarization": { "spearman": [ { - "Model": "bge-small-zh-v1.5" + "Model": "m3e-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bge-small-zh-v1.5" + "Model": "m3e-large" } ] } }, - "bm25": { + "dfm-encoder-large-v1": { "BitextMining": { "f1": [ { - "Model": "bm25" + "Model": "dfm-encoder-large-v1", + "BornholmBitextMining": 11.65 } ] }, "Classification": { "accuracy": [ { - "Model": "bm25" + "Model": "dfm-encoder-large-v1", + "AngryTweetsClassification": 53.8, + "DKHateClassification": 60.09, + "DanishPoliticalCommentsClassification": 36.6, + "LccSentimentClassification": 57.33, + "MassiveIntentClassification (da)": 60.55, + "MassiveIntentClassification (nb)": 52.49, + "MassiveIntentClassification (sv)": 49.74, + "MassiveScenarioClassification (da)": 64.16, + "MassiveScenarioClassification (nb)": 54.59, + "MassiveScenarioClassification (sv)": 50.1, + "NoRecClassification": 48.3, + "NordicLangClassification": 77.68, + "NorwegianParliament": 58.78, + "ScalaDaClassification": 63.08, + "ScalaNbClassification": 58.95 } ] }, "Clustering": { "v_measure": [ { - "Model": "bm25" + "Model": "dfm-encoder-large-v1" } ] }, "PairClassification": { "ap": [ { - "Model": "bm25" + "Model": "dfm-encoder-large-v1" } ] }, "Reranking": { "map": [ { - "Model": "bm25" + "Model": "dfm-encoder-large-v1" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bm25", - "BrightRetrieval (robotics)": 13.53, - "BrightRetrieval (pony)": 7.93, - "BrightRetrieval (leetcode)": 24.37, - "BrightRetrieval (earth_science)": 27.06, - "BrightRetrieval (stackoverflow)": 16.55, - "BrightRetrieval (economics)": 14.87, - "BrightRetrieval (theoremqa_questions)": 9.78, - "BrightRetrieval (theoremqa_theorems)": 4.25, - "BrightRetrieval (psychology)": 12.51, - "BrightRetrieval (sustainable_living)": 15.22, - "BrightRetrieval (biology)": 19.19, - "BrightRetrieval (aops)": 6.2 + "Model": "dfm-encoder-large-v1" } ] }, "STS": { "spearman": [ { - "Model": "bm25" + "Model": 
"dfm-encoder-large-v1" } ] }, "Summarization": { "spearman": [ { - "Model": "bm25" + "Model": "dfm-encoder-large-v1" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bm25", - "Core17InstructionRetrieval": -1.06, - "News21InstructionRetrieval": -2.15, - "Robust04InstructionRetrieval": -3.06 + "Model": "dfm-encoder-large-v1" } ] } }, - "paraphrase-multilingual-MiniLM-L12-v2": { + "norbert3-large": { "BitextMining": { "f1": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "BUCC (de-en)": 97.11, - "BUCC (fr-en)": 94.99, - "BUCC (ru-en)": 95.06, - "BUCC (zh-en)": 95.63, - "BornholmBitextMining (dan-Latn)": 19.67, - "Tatoeba (nds-Latn_eng-Latn)": 32.16, - "Tatoeba (dtp-Latn_eng-Latn)": 5.69, - "Tatoeba (fry-Latn_eng-Latn)": 31.13, - "Tatoeba (rus-Cyrl_eng-Latn)": 91.87, - "Tatoeba (cmn-Hans_eng-Latn)": 94.93, - "Tatoeba (afr-Latn_eng-Latn)": 58.22, - "Tatoeba (ast-Latn_eng-Latn)": 62.17, - "Tatoeba (kab-Latn_eng-Latn)": 1.16, - "Tatoeba (cor-Latn_eng-Latn)": 3.42, - "Tatoeba (est-Latn_eng-Latn)": 97.33, - "Tatoeba (cym-Latn_eng-Latn)": 13.25, - "Tatoeba (dsb-Latn_eng-Latn)": 33.43, - "Tatoeba (oci-Latn_eng-Latn)": 38.57, - "Tatoeba (zsm-Latn_eng-Latn)": 95.31, - "Tatoeba (yid-Hebr_eng-Latn)": 14.38, - "Tatoeba (bel-Cyrl_eng-Latn)": 67.73, - "Tatoeba (gle-Latn_eng-Latn)": 11.62, - "Tatoeba (slv-Latn_eng-Latn)": 96.92, - "Tatoeba (lvs-Latn_eng-Latn)": 97.87, - "Tatoeba (orv-Cyrl_eng-Latn)": 15.1, - "Tatoeba (bul-Cyrl_eng-Latn)": 92.65, - "Tatoeba (tgl-Latn_eng-Latn)": 13.09, - "Tatoeba (ind-Latn_eng-Latn)": 92.74, - "Tatoeba (mon-Cyrl_eng-Latn)": 95.04, - "Tatoeba (fao-Latn_eng-Latn)": 27.51, - "Tatoeba (fin-Latn_eng-Latn)": 93.1, - "Tatoeba (srp-Cyrl_eng-Latn)": 92.24, - "Tatoeba (bos-Latn_eng-Latn)": 93.27, - "Tatoeba (kor-Hang_eng-Latn)": 92.52, - "Tatoeba (cat-Latn_eng-Latn)": 94.42, - "Tatoeba (por-Latn_eng-Latn)": 92.13, - "Tatoeba (spa-Latn_eng-Latn)": 95.42, - "Tatoeba (ukr-Cyrl_eng-Latn)": 92.82, - "Tatoeba (war-Latn_eng-Latn)": 7.25, - "Tatoeba (hsb-Latn_eng-Latn)": 36.1, - "Tatoeba (dan-Latn_eng-Latn)": 94.8, - "Tatoeba (nov-Latn_eng-Latn)": 47.99, - "Tatoeba (kat-Geor_eng-Latn)": 95.44, - "Tatoeba (gla-Latn_eng-Latn)": 3.61, - "Tatoeba (ron-Latn_eng-Latn)": 95.3, - "Tatoeba (glg-Latn_eng-Latn)": 94.0, - "Tatoeba (vie-Latn_eng-Latn)": 95.12, - "Tatoeba (pol-Latn_eng-Latn)": 94.28, - "Tatoeba (hrv-Latn_eng-Latn)": 95.98, - "Tatoeba (fra-Latn_eng-Latn)": 91.72, - "Tatoeba (hye-Armn_eng-Latn)": 93.28, - "Tatoeba (ile-Latn_eng-Latn)": 57.71, - "Tatoeba (arz-Arab_eng-Latn)": 51.26, - "Tatoeba (nob-Latn_eng-Latn)": 97.73, - "Tatoeba (amh-Ethi_eng-Latn)": 36.21, - "Tatoeba (nld-Latn_eng-Latn)": 94.58, - "Tatoeba (swg-Latn_eng-Latn)": 26.31, - "Tatoeba (cha-Latn_eng-Latn)": 15.98, - "Tatoeba (nno-Latn_eng-Latn)": 76.34, - "Tatoeba (mal-Mlym_eng-Latn)": 32.2, - "Tatoeba (urd-Arab_eng-Latn)": 94.57, - "Tatoeba (uzb-Latn_eng-Latn)": 17.14, - "Tatoeba (swe-Latn_eng-Latn)": 94.42, - "Tatoeba (wuu-Hans_eng-Latn)": 76.0, - "Tatoeba (ceb-Latn_eng-Latn)": 8.05, - "Tatoeba (hin-Deva_eng-Latn)": 97.62, - "Tatoeba (ces-Latn_eng-Latn)": 95.12, - "Tatoeba (arq-Arab_eng-Latn)": 18.6, - "Tatoeba (jav-Latn_eng-Latn)": 17.04, - "Tatoeba (swh-Latn_eng-Latn)": 14.48, - "Tatoeba (kzj-Latn_eng-Latn)": 6.24, - "Tatoeba (jpn-Jpan_eng-Latn)": 90.41, - "Tatoeba (xho-Latn_eng-Latn)": 4.52, - "Tatoeba (csb-Latn_eng-Latn)": 21.56, - "Tatoeba (max-Deva_eng-Latn)": 45.25, - "Tatoeba (ben-Beng_eng-Latn)": 36.48, - "Tatoeba (ara-Arab_eng-Latn)": 87.93, - "Tatoeba (kur-Latn_eng-Latn)": 46.94, - "Tatoeba 
(lit-Latn_eng-Latn)": 93.16, - "Tatoeba (isl-Latn_eng-Latn)": 24.07, - "Tatoeba (cbk-Latn_eng-Latn)": 55.37, - "Tatoeba (uig-Arab_eng-Latn)": 24.39, - "Tatoeba (mhr-Cyrl_eng-Latn)": 6.89, - "Tatoeba (slk-Latn_eng-Latn)": 95.15, - "Tatoeba (tha-Thai_eng-Latn)": 96.72, - "Tatoeba (ell-Grek_eng-Latn)": 95.43, - "Tatoeba (pam-Latn_eng-Latn)": 5.41, - "Tatoeba (pes-Arab_eng-Latn)": 92.59, - "Tatoeba (yue-Hant_eng-Latn)": 71.45, - "Tatoeba (tur-Latn_eng-Latn)": 95.08, - "Tatoeba (tel-Telu_eng-Latn)": 36.4, - "Tatoeba (eus-Latn_eng-Latn)": 23.18, - "Tatoeba (ina-Latn_eng-Latn)": 79.13, - "Tatoeba (aze-Latn_eng-Latn)": 62.1, - "Tatoeba (lfn-Latn_eng-Latn)": 47.02, - "Tatoeba (heb-Hebr_eng-Latn)": 86.88, - "Tatoeba (mar-Deva_eng-Latn)": 92.38, - "Tatoeba (sqi-Latn_eng-Latn)": 98.17, - "Tatoeba (tat-Cyrl_eng-Latn)": 10.25, - "Tatoeba (lat-Latn_eng-Latn)": 19.47, - "Tatoeba (tzl-Latn_eng-Latn)": 25.46, - "Tatoeba (tuk-Latn_eng-Latn)": 15.16, - "Tatoeba (ang-Latn_eng-Latn)": 10.24, - "Tatoeba (bre-Latn_eng-Latn)": 5.56, - "Tatoeba (ber-Tfng_eng-Latn)": 4.43, - "Tatoeba (gsw-Latn_eng-Latn)": 25.74, - "Tatoeba (ita-Latn_eng-Latn)": 93.05, - "Tatoeba (awa-Deva_eng-Latn)": 33.43, - "Tatoeba (tam-Taml_eng-Latn)": 24.64, - "Tatoeba (mkd-Cyrl_eng-Latn)": 91.0, - "Tatoeba (hun-Latn_eng-Latn)": 91.58, - "Tatoeba (pms-Latn_eng-Latn)": 30.7, - "Tatoeba (epo-Latn_eng-Latn)": 41.73, - "Tatoeba (ido-Latn_eng-Latn)": 40.25, - "Tatoeba (khm-Khmr_eng-Latn)": 32.11, - "Tatoeba (kaz-Cyrl_eng-Latn)": 34.89, - "Tatoeba (deu-Latn_eng-Latn)": 97.02, - "Tatoeba (afr-eng)": 58.22, - "Tatoeba (amh-eng)": 36.21, - "Tatoeba (ang-eng)": 10.24, - "Tatoeba (ara-eng)": 87.93, - "Tatoeba (arq-eng)": 18.6, - "Tatoeba (arz-eng)": 51.26, - "Tatoeba (ast-eng)": 62.17, - "Tatoeba (awa-eng)": 33.43, - "Tatoeba (aze-eng)": 62.1, - "Tatoeba (bel-eng)": 67.73, - "Tatoeba (ben-eng)": 36.48, - "Tatoeba (ber-eng)": 4.43, - "Tatoeba (bos-eng)": 93.27, - "Tatoeba (bre-eng)": 5.56, - "Tatoeba (bul-eng)": 92.65, - "Tatoeba (cat-eng)": 94.42, - "Tatoeba (cbk-eng)": 55.37, - "Tatoeba (ceb-eng)": 8.05, - "Tatoeba (ces-eng)": 95.12, - "Tatoeba (cha-eng)": 15.98, - "Tatoeba (cmn-eng)": 94.93, - "Tatoeba (cor-eng)": 3.42, - "Tatoeba (csb-eng)": 21.56, - "Tatoeba (cym-eng)": 13.25, - "Tatoeba (dan-eng)": 94.8, - "Tatoeba (deu-eng)": 97.02, - "Tatoeba (dsb-eng)": 33.43, - "Tatoeba (dtp-eng)": 5.69, - "Tatoeba (ell-eng)": 95.43, - "Tatoeba (epo-eng)": 41.73, - "Tatoeba (est-eng)": 97.33, - "Tatoeba (eus-eng)": 23.18, - "Tatoeba (fao-eng)": 27.51, - "Tatoeba (fin-eng)": 93.1, - "Tatoeba (fra-eng)": 91.72, - "Tatoeba (fry-eng)": 31.13, - "Tatoeba (gla-eng)": 3.61, - "Tatoeba (gle-eng)": 11.62, - "Tatoeba (glg-eng)": 94.0, - "Tatoeba (gsw-eng)": 25.74, - "Tatoeba (heb-eng)": 86.88, - "Tatoeba (hin-eng)": 97.62, - "Tatoeba (hrv-eng)": 95.98, - "Tatoeba (hsb-eng)": 36.1, - "Tatoeba (hun-eng)": 91.58, - "Tatoeba (hye-eng)": 93.28, - "Tatoeba (ido-eng)": 40.25, - "Tatoeba (ile-eng)": 57.71, - "Tatoeba (ina-eng)": 79.13, - "Tatoeba (ind-eng)": 92.74, - "Tatoeba (isl-eng)": 24.07, - "Tatoeba (ita-eng)": 93.05, - "Tatoeba (jav-eng)": 17.04, - "Tatoeba (jpn-eng)": 90.41, - "Tatoeba (kab-eng)": 1.16, - "Tatoeba (kat-eng)": 95.44, - "Tatoeba (kaz-eng)": 34.89, - "Tatoeba (khm-eng)": 32.11, - "Tatoeba (kor-eng)": 92.52, - "Tatoeba (kur-eng)": 46.94, - "Tatoeba (kzj-eng)": 6.24, - "Tatoeba (lat-eng)": 19.47, - "Tatoeba (lfn-eng)": 47.02, - "Tatoeba (lit-eng)": 93.16, - "Tatoeba (lvs-eng)": 97.87, - "Tatoeba (mal-eng)": 32.2, - "Tatoeba (mar-eng)": 92.38, - "Tatoeba 
(max-eng)": 45.25, - "Tatoeba (mhr-eng)": 6.89, - "Tatoeba (mkd-eng)": 91.0, - "Tatoeba (mon-eng)": 95.04, - "Tatoeba (nds-eng)": 32.16, - "Tatoeba (nld-eng)": 94.58, - "Tatoeba (nno-eng)": 76.34, - "Tatoeba (nob-eng)": 97.73, - "Tatoeba (nov-eng)": 47.99, - "Tatoeba (oci-eng)": 38.57, - "Tatoeba (orv-eng)": 15.1, - "Tatoeba (pam-eng)": 5.41, - "Tatoeba (pes-eng)": 92.59, - "Tatoeba (pms-eng)": 30.7, - "Tatoeba (pol-eng)": 94.28, - "Tatoeba (por-eng)": 92.13, - "Tatoeba (ron-eng)": 95.3, - "Tatoeba (rus-eng)": 91.87, - "Tatoeba (slk-eng)": 95.15, - "Tatoeba (slv-eng)": 96.92, - "Tatoeba (spa-eng)": 95.42, - "Tatoeba (sqi-eng)": 98.17, - "Tatoeba (srp-eng)": 92.24, - "Tatoeba (swe-eng)": 94.42, - "Tatoeba (swg-eng)": 26.31, - "Tatoeba (swh-eng)": 14.48, - "Tatoeba (tam-eng)": 24.64, - "Tatoeba (tat-eng)": 10.25, - "Tatoeba (tel-eng)": 36.4, - "Tatoeba (tgl-eng)": 13.09, - "Tatoeba (tha-eng)": 96.72, - "Tatoeba (tuk-eng)": 15.16, - "Tatoeba (tur-eng)": 95.08, - "Tatoeba (tzl-eng)": 25.46, - "Tatoeba (uig-eng)": 24.39, - "Tatoeba (ukr-eng)": 92.82, - "Tatoeba (urd-eng)": 94.57, - "Tatoeba (uzb-eng)": 17.14, - "Tatoeba (vie-eng)": 95.12, - "Tatoeba (war-eng)": 7.25, - "Tatoeba (wuu-eng)": 76.0, - "Tatoeba (xho-eng)": 4.52, - "Tatoeba (yid-eng)": 14.38, - "Tatoeba (yue-eng)": 71.45, - "Tatoeba (zsm-eng)": 95.31 + "Model": "norbert3-large", + "BornholmBitextMining": 2.9 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "norbert3-large", + "AngryTweetsClassification": 49.04, + "DKHateClassification": 62.71, + "DanishPoliticalCommentsClassification": 33.53, + "LccSentimentClassification": 46.93, + "MassiveIntentClassification (da)": 45.98, + "MassiveIntentClassification (nb)": 47.42, + "MassiveIntentClassification (sv)": 48.47, + "MassiveScenarioClassification (da)": 50.51, + "MassiveScenarioClassification (nb)": 54.25, + "MassiveScenarioClassification (sv)": 50.6, + "NoRecClassification": 50.46, + "NordicLangClassification": 84.25, + "NorwegianParliament": 58.85, + "ScalaDaClassification": 60.72, + "ScalaNbClassification": 66.79 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "norbert3-large" } ] }, - "Classification": { - "accuracy": [ + "PairClassification": { + "ap": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "AllegroReviews (pol-Latn)": 30.85, - "AllegroReviews": 30.88, - "AmazonCounterfactualClassification (en-ext)": 69.99, - "AmazonCounterfactualClassification (en)": 71.57, - "AmazonCounterfactualClassification (deu-Latn)": 68.36, - "AmazonCounterfactualClassification (jpn-Jpan)": 63.37, - "AmazonCounterfactualClassification (de)": 68.35, - "AmazonCounterfactualClassification (ja)": 63.45, - "AmazonPolarityClassification": 69.21, - "AmazonReviewsClassification (en)": 35.11, - "AmazonReviewsClassification (deu-Latn)": 35.91, - "AmazonReviewsClassification (spa-Latn)": 37.49, - "AmazonReviewsClassification (fra-Latn)": 35.29, - "AmazonReviewsClassification (jpn-Jpan)": 33.21, - "AmazonReviewsClassification (cmn-Hans)": 35.24, - "AmazonReviewsClassification (de)": 35.91, - "AmazonReviewsClassification (es)": 37.49, - "AmazonReviewsClassification (fr)": 35.3, - "AmazonReviewsClassification (ja)": 33.24, - "AmazonReviewsClassification (zh)": 35.26, - "AngryTweetsClassification (dan-Latn)": 50.9, - "Banking77Classification": 79.77, - "CBD (pol-Latn)": 57.71, - "CBD": 57.68, - "DanishPoliticalCommentsClassification (dan-Latn)": 37.58, - "EmotionClassification": 42.37, - "GeoreviewClassification (rus-Cyrl)": 38.24, - "HeadlineClassification (rus-Cyrl)": 
68.3, - "IFlyTek (cmn-Hans)": 39.88, - "ImdbClassification": 60.46, - "InappropriatenessClassification (rus-Cyrl)": 58.18, - "JDReview (cmn-Hans)": 70.26, - "KinopoiskClassification (rus-Cyrl)": 41.45, - "LccSentimentClassification (dan-Latn)": 54.53, - "MTOPDomainClassification (en)": 87.06, - "MTOPDomainClassification (deu-Latn)": 79.21, - "MTOPDomainClassification (spa-Latn)": 83.06, - "MTOPDomainClassification (fra-Latn)": 78.64, - "MTOPDomainClassification (hin-Deva)": 81.36, - "MTOPDomainClassification (tha-Thai)": 79.97, - "MTOPDomainClassification (de)": 79.2, - "MTOPDomainClassification (es)": 83.04, - "MTOPDomainClassification (fr)": 78.63, - "MTOPDomainClassification (hi)": 81.36, - "MTOPDomainClassification (th)": 79.99, - "MTOPIntentClassification (en)": 65.52, - "MTOPIntentClassification (deu-Latn)": 54.21, - "MTOPIntentClassification (spa-Latn)": 60.3, - "MTOPIntentClassification (fra-Latn)": 54.01, - "MTOPIntentClassification (hin-Deva)": 59.92, - "MTOPIntentClassification (tha-Thai)": 61.97, - "MTOPIntentClassification (de)": 54.23, - "MTOPIntentClassification (es)": 60.28, - "MTOPIntentClassification (fr)": 54.05, - "MTOPIntentClassification (hi)": 59.9, - "MTOPIntentClassification (th)": 61.96, - "MasakhaNEWSClassification (amh-Ethi)": 64.28, - "MasakhaNEWSClassification (eng)": 74.7, - "MasakhaNEWSClassification (fra-Latn)": 71.68, - "MasakhaNEWSClassification (hau-Latn)": 47.96, - "MasakhaNEWSClassification (ibo-Latn)": 42.46, - "MasakhaNEWSClassification (lin-Latn)": 59.26, - "MasakhaNEWSClassification (lug-Latn)": 42.29, - "MasakhaNEWSClassification (orm-Ethi)": 34.98, - "MasakhaNEWSClassification (pcm-Latn)": 89.54, - "MasakhaNEWSClassification (run-Latn)": 47.2, - "MasakhaNEWSClassification (sna-Latn)": 57.56, - "MasakhaNEWSClassification (som-Latn)": 34.8, - "MasakhaNEWSClassification (swa-Latn)": 46.05, - "MasakhaNEWSClassification (tir-Ethi)": 27.94, - "MasakhaNEWSClassification (xho-Latn)": 44.81, - "MasakhaNEWSClassification (yor-Latn)": 52.92, - "MasakhaNEWSClassification (fra)": 76.09, - "MassiveIntentClassification (en)": 66.89, - "MassiveIntentClassification (kat-Geor)": 43.03, - "MassiveIntentClassification (vie-Latn)": 56.62, - "MassiveIntentClassification (tur-Latn)": 59.91, - "MassiveIntentClassification (deu-Latn)": 50.71, - "MassiveIntentClassification (isl-Latn)": 30.87, - "MassiveIntentClassification (tam-Taml)": 36.82, - "MassiveIntentClassification (kan-Knda)": 41.0, - "MassiveIntentClassification (mon-Cyrl)": 51.77, - "MassiveIntentClassification (pol-Latn)": 59.48, - "MassiveIntentClassification (spa-Latn)": 59.7, - "MassiveIntentClassification (ben-Beng)": 35.38, - "MassiveIntentClassification (por-Latn)": 61.29, - "MassiveIntentClassification (amh-Ethi)": 36.77, - "MassiveIntentClassification (cym-Latn)": 26.13, - "MassiveIntentClassification (ind-Latn)": 59.9, - "MassiveIntentClassification (ron-Latn)": 58.44, - "MassiveIntentClassification (cmo-Hant)": 58.74, - "MassiveIntentClassification (dan-Latn)": 57.75, - "MassiveIntentClassification (swe-Latn)": 59.43, - "MassiveIntentClassification (ara-Arab)": 45.15, - "MassiveIntentClassification (ita-Latn)": 59.66, - "MassiveIntentClassification (jpn-Jpan)": 60.9, - "MassiveIntentClassification (swa-Latn)": 29.56, - "MassiveIntentClassification (cmo-Hans)": 62.0, - "MassiveIntentClassification (aze-Latn)": 47.43, - "MassiveIntentClassification (hin-Deva)": 58.37, - "MassiveIntentClassification (fra-Latn)": 60.24, - "MassiveIntentClassification (hun-Latn)": 60.44, - "MassiveIntentClassification 
(jav-Latn)": 32.37, - "MassiveIntentClassification (slv-Latn)": 57.34, - "MassiveIntentClassification (ell-Grek)": 58.7, - "MassiveIntentClassification (hye-Armn)": 51.6, - "MassiveIntentClassification (nob-Latn)": 55.52, - "MassiveIntentClassification (rus-Cyrl)": 59.06, - "MassiveIntentClassification (fas-Arab)": 61.03, - "MassiveIntentClassification (mal-Mlym)": 42.44, - "MassiveIntentClassification (tha-Thai)": 58.92, - "MassiveIntentClassification (afr-Latn)": 45.87, - "MassiveIntentClassification (tel-Telu)": 40.77, - "MassiveIntentClassification (urd-Arab)": 52.79, - "MassiveIntentClassification (tgl-Latn)": 33.67, - "MassiveIntentClassification (nld-Latn)": 59.52, - "MassiveIntentClassification (fin-Latn)": 57.56, - "MassiveIntentClassification (lav-Latn)": 54.72, - "MassiveIntentClassification (sqi-Latn)": 56.6, - "MassiveIntentClassification (khm-Khmr)": 40.04, - "MassiveIntentClassification (msa-Latn)": 54.81, - "MassiveIntentClassification (heb-Hebr)": 52.55, - "MassiveIntentClassification (mya-Mymr)": 52.03, - "MassiveIntentClassification (kor-Kore)": 50.36, - "MassiveIntentClassification (pl)": 59.43, - "MassiveIntentClassification (fr)": 57.52, - "MassiveScenarioClassification (khm-Khmr)": 46.95, - "MassiveScenarioClassification (kan-Knda)": 45.72, - "MassiveScenarioClassification (isl-Latn)": 37.55, - "MassiveScenarioClassification (nob-Latn)": 64.25, - "MassiveScenarioClassification (swe-Latn)": 67.14, - "MassiveScenarioClassification (nld-Latn)": 65.53, - "MassiveScenarioClassification (slv-Latn)": 64.01, - "MassiveScenarioClassification (jpn-Jpan)": 66.49, - "MassiveScenarioClassification (spa-Latn)": 65.07, - "MassiveScenarioClassification (kor-Kore)": 55.71, - "MassiveScenarioClassification (fas-Arab)": 65.89, - "MassiveScenarioClassification (jav-Latn)": 38.62, - "MassiveScenarioClassification (aze-Latn)": 52.09, - "MassiveScenarioClassification (kat-Geor)": 50.66, - "MassiveScenarioClassification (rus-Cyrl)": 65.25, - "MassiveScenarioClassification (fra-Latn)": 66.09, - "MassiveScenarioClassification (fin-Latn)": 63.74, - "MassiveScenarioClassification (dan-Latn)": 66.87, - "MassiveScenarioClassification (ben-Beng)": 41.19, - "MassiveScenarioClassification (tur-Latn)": 66.53, - "MassiveScenarioClassification (ind-Latn)": 66.17, - "MassiveScenarioClassification (por-Latn)": 65.83, - "MassiveScenarioClassification (cym-Latn)": 31.71, - "MassiveScenarioClassification (pol-Latn)": 65.04, - "MassiveScenarioClassification (sqi-Latn)": 64.34, - "MassiveScenarioClassification (mal-Mlym)": 47.73, - "MassiveScenarioClassification (tel-Telu)": 46.49, - "MassiveScenarioClassification (en)": 71.54, - "MassiveScenarioClassification (ell-Grek)": 66.14, - "MassiveScenarioClassification (tha-Thai)": 67.05, - "MassiveScenarioClassification (tgl-Latn)": 37.39, - "MassiveScenarioClassification (msa-Latn)": 61.73, - "MassiveScenarioClassification (ara-Arab)": 51.71, - "MassiveScenarioClassification (heb-Hebr)": 59.22, - "MassiveScenarioClassification (deu-Latn)": 57.4, - "MassiveScenarioClassification (mya-Mymr)": 59.09, - "MassiveScenarioClassification (ron-Latn)": 64.2, - "MassiveScenarioClassification (hin-Deva)": 65.23, - "MassiveScenarioClassification (hun-Latn)": 66.57, - "MassiveScenarioClassification (afr-Latn)": 53.63, - "MassiveScenarioClassification (tam-Taml)": 42.63, - "MassiveScenarioClassification (hye-Armn)": 56.11, - "MassiveScenarioClassification (vie-Latn)": 60.73, - "MassiveScenarioClassification (lav-Latn)": 59.82, - "MassiveScenarioClassification (mon-Cyrl)": 57.07, 
- "MassiveScenarioClassification (urd-Arab)": 60.41, - "MassiveScenarioClassification (cmo-Hans)": 67.45, - "MassiveScenarioClassification (swa-Latn)": 34.86, - "MassiveScenarioClassification (amh-Ethi)": 41.89, - "MassiveScenarioClassification (ita-Latn)": 65.01, - "MassiveScenarioClassification (cmo-Hant)": 65.72, - "MassiveScenarioClassification (pl)": 65.04, - "MassiveScenarioClassification (fr)": 64.52, - "MultilingualSentiment (cmn-Hans)": 61.9, - "NoRecClassification (nob-Latn)": 46.7, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 42.52, - "OnlineShopping (cmn-Hans)": 84.89, - "PAC (pol-Latn)": 65.75, - "PAC": 65.76, - "PolEmo2.0-IN (pol-Latn)": 57.76, - "PolEmo2.0-IN": 57.76, - "PolEmo2.0-OUT (pol-Latn)": 28.66, - "PolEmo2.0-OUT": 28.7, - "RuReviewsClassification (rus-Cyrl)": 58.88, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.19, - "RuSciBenchOECDClassification (rus-Cyrl)": 41.41, - "TNews (cmn-Hans)": 39.19, - "ToxicConversationsClassification": 66.07, - "TweetSentimentExtractionClassification": 56.12, - "Waimai (cmn-Hans)": 82.27 + "Model": "norbert3-large" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "norbert3-large" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "norbert3-large" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "norbert3-large" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "norbert3-large" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "norbert3-large" + } + ] + } + }, + "electra-small-nordic": { + "BitextMining": { + "f1": [ + { + "Model": "electra-small-nordic", + "BornholmBitextMining": 1.44 + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "electra-small-nordic", + "AngryTweetsClassification": 47.91, + "DKHateClassification": 59.45, + "DanishPoliticalCommentsClassification": 31.89, + "LccSentimentClassification": 47.93, + "MassiveIntentClassification (da)": 26.3, + "MassiveIntentClassification (nb)": 24.6, + "MassiveIntentClassification (sv)": 27.58, + "MassiveScenarioClassification (da)": 28.93, + "MassiveScenarioClassification (nb)": 27.3, + "MassiveScenarioClassification (sv)": 29.93, + "NoRecClassification": 45.44, + "NordicLangClassification": 57.82, + "NorwegianParliament": 53.25, + "ScalaDaClassification": 70.41, + "ScalaNbClassification": 75.28 } ] }, "Clustering": { "v_measure": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "8TagsClustering": 23.24, - "AlloProfClusteringP2P": 56.06, - "AlloProfClusteringS2S": 42.16, - "ArxivClusteringP2P": 38.33, - "ArxivClusteringS2S": 31.55, - "BiorxivClusteringP2P": 33.49, - "BiorxivClusteringS2S": 29.44, - "BlurbsClusteringP2P": 32.46, - "BlurbsClusteringS2S": 14.33, - "GeoreviewClusteringP2P (rus-Cyrl)": 53.35, - "HALClusteringS2S": 23.21, - "MLSUMClusteringP2P (rus-Cyrl)": 37.0, - "MLSUMClusteringP2P": 39.97, - "MLSUMClusteringS2S (rus-Cyrl)": 38.16, - "MLSUMClusteringS2S": 36.55, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.36, - "MasakhaNEWSClusteringP2P (eng)": 49.96, - "MasakhaNEWSClusteringP2P (fra-Latn)": 40.85, - "MasakhaNEWSClusteringP2P (hau-Latn)": 19.39, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 33.81, - "MasakhaNEWSClusteringP2P (lin-Latn)": 51.98, - "MasakhaNEWSClusteringP2P (lug-Latn)": 41.88, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 22.23, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.64, - "MasakhaNEWSClusteringP2P (run-Latn)": 48.03, - "MasakhaNEWSClusteringP2P (sna-Latn)": 44.62, - "MasakhaNEWSClusteringP2P (som-Latn)": 27.54, - 
"MasakhaNEWSClusteringP2P (swa-Latn)": 22.69, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.02, - "MasakhaNEWSClusteringP2P (xho-Latn)": 27.68, - "MasakhaNEWSClusteringP2P (yor-Latn)": 27.29, - "MasakhaNEWSClusteringP2P (fra)": 36.58, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 42.28, - "MasakhaNEWSClusteringS2S (eng)": 25.74, - "MasakhaNEWSClusteringS2S (fra-Latn)": 36.5, - "MasakhaNEWSClusteringS2S (hau-Latn)": 9.2, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 33.37, - "MasakhaNEWSClusteringS2S (lin-Latn)": 47.76, - "MasakhaNEWSClusteringS2S (lug-Latn)": 45.15, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 22.08, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 58.42, - "MasakhaNEWSClusteringS2S (run-Latn)": 47.41, - "MasakhaNEWSClusteringS2S (sna-Latn)": 43.0, - "MasakhaNEWSClusteringS2S (som-Latn)": 26.22, - "MasakhaNEWSClusteringS2S (swa-Latn)": 13.53, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.4, - "MasakhaNEWSClusteringS2S (xho-Latn)": 21.03, - "MasakhaNEWSClusteringS2S (yor-Latn)": 27.04, - "MasakhaNEWSClusteringS2S (fra)": 33.9, - "MedrxivClusteringP2P": 31.52, - "MedrxivClusteringS2S": 30.87, - "RedditClustering": 42.02, - "RedditClusteringP2P": 50.73, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.22, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.68, - "StackExchangeClustering": 49.6, - "StackExchangeClusteringP2P": 31.69, - "TenKGnadClusteringP2P": 36.13, - "TenKGnadClusteringS2S": 22.26, - "TwentyNewsgroupsClustering": 39.28 + "Model": "electra-small-nordic" } ] }, "PairClassification": { "ap": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "CDSC-E (pol-Latn)": 72.22, - "CDSC-E": 72.22, - "OpusparcusPC (deu-Latn)": 96.63, - "OpusparcusPC (en)": 98.59, - "OpusparcusPC (fin-Latn)": 93.2, - "OpusparcusPC (fra-Latn)": 92.01, - "OpusparcusPC (rus-Cyrl)": 88.25, - "OpusparcusPC (swe-Latn)": 93.99, - "OpusparcusPC (fr)": 92.01, - "PPC": 91.8, - "PSC (pol-Latn)": 97.14, - "PSC": 97.14, - "PawsXPairClassification (deu-Latn)": 53.26, - "PawsXPairClassification (en)": 55.94, - "PawsXPairClassification (spa-Latn)": 54.61, - "PawsXPairClassification (fra-Latn)": 56.94, - "PawsXPairClassification (jpn-Hira)": 48.66, - "PawsXPairClassification (kor-Hang)": 49.69, - "PawsXPairClassification (cmn-Hans)": 54.3, - "PawsXPairClassification (fr)": 56.94, - "SICK-E-PL (pol-Latn)": 71.94, - "SICK-E-PL": 71.94, - "SprintDuplicateQuestions": 89.46, - "TERRa (rus-Cyrl)": 58.56, - "TwitterSemEval2015": 62.06, - "TwitterURLCorpus": 83.83 + "Model": "electra-small-nordic" } ] }, "Reranking": { "map": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "AlloprofReranking (fra-Latn)": 62.42, - "AlloprofReranking": 49.01, - "AskUbuntuDupQuestions": 60.49, - "MMarcoReranking (cmn-Hans)": 16.14, - "MindSmallReranking": 30.37, - "RuBQReranking (rus-Cyrl)": 52.8, - "SciDocsRR": 77.78, - "StackOverflowDupQuestions": 45.85, - "SyntecReranking (fra-Latn)": 72.5, - "SyntecReranking": 75.03, - "T2Reranking (cmn-Hans)": 65.28 + "Model": "electra-small-nordic" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "AILACasedocs": 13.66, - "AILAStatutes": 20.52, - "ARCChallenge": 6.19, - "AlloprofRetrieval (fra-Latn)": 26.63, - "AlloprofRetrieval": 26.63, - "AlphaNLI": 20.89, - "ArguAna": 44.88, - "ArguAna-PL (pol-Latn)": 37.86, - "ArguAna-PL": 37.83, - "BSARDRetrieval (fra-Latn)": 9.6, - "BSARDRetrieval": 0.0, - "CQADupstackRetrieval": 30.7, - "ClimateFEVER": 18.49, - "CmedqaRetrieval (cmn-Hans)": 10.78, - "CovidRetrieval (cmn-Hans)": 30.11, - "DBPedia": 22.63, - "DBPedia-PL": 18.0, 
- "DuRetrieval (cmn-Hans)": 34.72, - "EcomRetrieval (cmn-Hans)": 13.32, - "FEVER": 52.66, - "FiQA-PL (pol-Latn)": 12.49, - "FiQA-PL": 12.49, - "FiQA2018": 20.33, - "GerDaLIRSmall (deu-Latn)": 2.62, - "HellaSwag": 16.98, - "HotpotQA": 30.01, - "HotpotQA-PL": 22.76, - "LEMBNarrativeQARetrieval": 13.82, - "LEMBNeedleRetrieval": 13.5, - "LEMBPasskeyRetrieval": 8.25, - "LEMBQMSumRetrieval": 11.02, - "LEMBSummScreenFDRetrieval": 38.12, - "LEMBWikimQARetrieval": 40.84, - "LeCaRDv2 (zho-Hans)": 32.03, - "LegalBenchConsumerContractsQA": 49.81, - "LegalBenchCorporateLobbying": 88.51, - "LegalQuAD (deu-Latn)": 13.31, - "LegalSummarization": 54.97, - "MMarcoRetrieval (cmn-Hans)": 46.62, - "MSMARCO": 23.72, - "MSMARCO-PL": 10.39, - "MedicalRetrieval (cmn-Hans)": 15.46, - "MintakaRetrieval (ara-Arab)": 12.61, - "MintakaRetrieval (deu-Latn)": 21.77, - "MintakaRetrieval (spa-Latn)": 21.59, - "MintakaRetrieval (fra-Latn)": 21.53, - "MintakaRetrieval (hin-Deva)": 16.76, - "MintakaRetrieval (ita-Latn)": 22.23, - "MintakaRetrieval (jpn-Hira)": 14.33, - "MintakaRetrieval (por-Latn)": 22.52, - "MintakaRetrieval (fr)": 21.53, - "NFCorpus": 23.45, - "NFCorpus-PL (pol-Latn)": 17.17, - "NFCorpus-PL": 17.16, - "NQ": 29.8, - "NQ-PL": 12.56, - "PIQA": 15.79, - "Quail": 2.96, - "Quora-PL": 77.18, - "QuoraRetrieval": 86.55, - "RARbCode": 8.48, - "RARbMath": 30.02, - "RiaNewsRetrieval (rus-Cyrl)": 44.82, - "RuBQRetrieval (rus-Cyrl)": 29.7, - "SCIDOCS": 0.03, - "SCIDOCS-PL (pol-Latn)": 10.26, - "SCIDOCS-PL": 10.26, - "SIQA": 0.88, - "SciFact": 48.37, - "SciFact-PL (pol-Latn)": 40.24, - "SciFact-PL": 40.24, - "SpartQA": 4.94, - "SyntecRetrieval (fra-Latn)": 65.54, - "SyntecRetrieval": 65.54, - "T2Retrieval (cmn-Hans)": 30.31, - "TRECCOVID": 39.12, - "TRECCOVID-PL (pol-Latn)": 34.23, - "TRECCOVID-PL": 34.38, - "TempReasonL1": 1.43, - "TempReasonL2Fact": 6.21, - "TempReasonL2Pure": 0.22, - "TempReasonL3Fact": 6.77, - "TempReasonL3Pure": 4.9, - "Touche2020": 16.06, - "VideoRetrieval (cmn-Hans)": 14.71, - "WinoGrande": 46.52, - "XPQARetrieval (ara-Arab_ara-Arab)": 22.97, - "XPQARetrieval (eng-Latn_ara-Arab)": 17.17, - "XPQARetrieval (ara-Arab_eng-Latn)": 25.5, - "XPQARetrieval (deu-Latn_deu-Latn)": 42.62, - "XPQARetrieval (eng-Latn_deu-Latn)": 26.52, - "XPQARetrieval (deu-Latn_eng-Latn)": 48.73, - "XPQARetrieval (spa-Latn_spa-Latn)": 38.24, - "XPQARetrieval (eng-Latn_spa-Latn)": 26.09, - "XPQARetrieval (spa-Latn_eng-Latn)": 41.51, - "XPQARetrieval (fra-Latn_fra-Latn)": 42.51, - "XPQARetrieval (eng-Latn_fra-Latn)": 26.09, - "XPQARetrieval (fra-Latn_eng-Latn)": 43.08, - "XPQARetrieval (hin-Deva_hin-Deva)": 52.09, - "XPQARetrieval (eng-Latn_hin-Deva)": 24.08, - "XPQARetrieval (hin-Deva_eng-Latn)": 49.11, - "XPQARetrieval (ita-Latn_ita-Latn)": 51.63, - "XPQARetrieval (eng-Latn_ita-Latn)": 29.34, - "XPQARetrieval (ita-Latn_eng-Latn)": 46.53, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 51.57, - "XPQARetrieval (eng-Latn_jpn-Hira)": 23.87, - "XPQARetrieval (jpn-Hira_eng-Latn)": 44.93, - "XPQARetrieval (kor-Hang_kor-Hang)": 21.34, - "XPQARetrieval (eng-Latn_kor-Hang)": 21.51, - "XPQARetrieval (kor-Hang_eng-Latn)": 22.59, - "XPQARetrieval (pol-Latn_pol-Latn)": 28.45, - "XPQARetrieval (eng-Latn_pol-Latn)": 17.08, - "XPQARetrieval (pol-Latn_eng-Latn)": 26.57, - "XPQARetrieval (por-Latn_por-Latn)": 32.33, - "XPQARetrieval (eng-Latn_por-Latn)": 19.76, - "XPQARetrieval (por-Latn_eng-Latn)": 34.2, - "XPQARetrieval (tam-Taml_tam-Taml)": 6.36, - "XPQARetrieval (eng-Latn_tam-Taml)": 5.36, - "XPQARetrieval (tam-Taml_eng-Latn)": 9.03, - "XPQARetrieval 
(cmn-Hans_cmn-Hans)": 44.16, - "XPQARetrieval (eng-Latn_cmn-Hans)": 19.03, - "XPQARetrieval (cmn-Hans_eng-Latn)": 40.08, - "XPQARetrieval (fr)": 42.51 + "Model": "electra-small-nordic" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "electra-small-nordic" + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "electra-small-nordic" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "electra-small-nordic" + } + ] + } + }, + "bge-small-en-v1.5-instruct": { + "BitextMining": { + "f1": [ + { + "Model": "bge-small-en-v1.5-instruct" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "bge-small-en-v1.5-instruct" + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "bge-small-en-v1.5-instruct" + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "bge-small-en-v1.5-instruct" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "bge-small-en-v1.5-instruct" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "bge-small-en-v1.5-instruct", + "ARCChallenge": 7.72, + "AlphaNLI": 1.26, + "HellaSwag": 23.41, + "PIQA": 20.79, + "Quail": 2.01, + "RARbCode": 41.52, + "RARbMath": 46.5, + "SIQA": 0.98, + "SpartQA": 2.86, + "TempReasonL1": 1.27, + "TempReasonL2Fact": 16.72, + "TempReasonL2Pure": 1.1, + "TempReasonL3Fact": 12.81, + "TempReasonL3Pure": 4.63, + "WinoGrande": 5.35 } ] }, "STS": { "spearman": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "AFQMC (cmn-Hans)": 14.3, - "ATEC (cmn-Hans)": 18.42, - "BIOSSES": 74.18, - "BQ (cmn-Hans)": 38.53, - "CDSC-R (pol-Latn)": 88.98, - "CDSC-R": 88.98, - "LCQMC (cmn-Hans)": 63.96, - "PAWSX (cmn-Hans)": 10.13, - "RUParaPhraserSTS (rus-Cyrl)": 61.87, - "RuSTSBenchmarkSTS (rus-Cyrl)": 79.55, - "SICK-R": 79.61, - "SICK-R-PL (pol-Latn)": 68.77, - "SICK-R-PL": 68.77, - "SICKFr (fra-Latn)": 75.1, - "SICKFr": 75.1, - "STS12": 76.02, - "STS13": 80.7, - "STS14": 78.85, - "STS15": 85.84, - "STS16": 81.05, - "STS17 (fra-Latn_eng-Latn)": 76.59, - "STS17 (nld-Latn_eng-Latn)": 81.71, - "STS17 (ita-Latn_eng-Latn)": 82.35, - "STS17 (kor-Hang)": 77.03, - "STS17 (ara-Arab)": 79.16, - "STS17 (eng-Latn_ara-Arab)": 81.22, - "STS17 (spa-Latn_eng-Latn)": 84.44, - "STS17 (spa-Latn)": 85.56, - "STS17 (eng-Latn_deu-Latn)": 84.22, - "STS17 (en-en)": 86.87, - "STS17 (eng-Latn_tur-Latn)": 76.74, - "STS17 (ar-ar)": 79.16, - "STS17 (en-ar)": 81.22, - "STS17 (en-de)": 84.22, - "STS17 (en-tr)": 76.74, - "STS17 (es-en)": 84.44, - "STS17 (es-es)": 85.56, - "STS17 (fr-en)": 76.59, - "STS17 (it-en)": 82.35, - "STS17 (ko-ko)": 77.03, - "STS17 (nl-en)": 81.71, - "STS22 (ara-Arab)": 46.2, - "STS22 (spa-Latn_eng-Latn)": 67.33, - "STS22 (cmn-Hans)": 58.75, - "STS22 (fra-Latn)": 70.55, - "STS22 (en)": 62.07, - "STS22 (deu-Latn)": 44.64, - "STS22 (pol-Latn)": 33.74, - "STS22 (rus-Cyrl)": 57.08, - "STS22 (pol-Latn_eng-Latn)": 69.02, - "STS22 (deu-Latn_eng-Latn)": 52.65, - "STS22 (cmn-Hans_eng-Latn)": 65.71, - "STS22 (tur-Latn)": 53.39, - "STS22 (spa-Latn)": 56.56, - "STS22 (deu-Latn_pol-Latn)": 44.22, - "STS22 (spa-Latn_ita-Latn)": 47.67, - "STS22 (deu-Latn_fra-Latn)": 51.73, - "STS22 (fra-Latn_pol-Latn)": 50.71, - "STS22 (ita-Latn)": 55.22, - "STS22 (pl)": 33.73, - "STS22 (fr)": 70.55, - "STSB (cmn-Hans)": 78.91, - "STSBenchmark": 84.42, - "STSBenchmarkMultilingualSTS (spa-Latn)": 81.1, - "STSBenchmarkMultilingualSTS (fra-Latn)": 79.9, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 80.47, - "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.32, - "STSBenchmarkMultilingualSTS (ita-Latn)": 80.39, - "STSBenchmarkMultilingualSTS 
(pol-Latn)": 78.29, - "STSBenchmarkMultilingualSTS (por-Latn)": 80.16, - "STSBenchmarkMultilingualSTS (deu-Latn)": 78.87, - "STSBenchmarkMultilingualSTS (nld-Latn)": 79.54, - "STSBenchmarkMultilingualSTS (en)": 84.42, - "STSBenchmarkMultilingualSTS (fr)": 79.9 + "Model": "bge-small-en-v1.5-instruct" } ] }, "Summarization": { "spearman": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2", - "SummEval": 30.67, - "SummEvalFr (fra-Latn)": 29.2, - "SummEvalFr": 29.2 + "Model": "bge-small-en-v1.5-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "paraphrase-multilingual-MiniLM-L12-v2" + "Model": "bge-small-en-v1.5-instruct" } ] } }, - "nb-bert-base": { + "sentence-croissant-llm-base": { "BitextMining": { "f1": [ { - "Model": "nb-bert-base", - "BornholmBitextMining": 9.88 + "Model": "sentence-croissant-llm-base" } ] }, "Classification": { "accuracy": [ { - "Model": "nb-bert-base", - "AngryTweetsClassification": 52.14, - "DKHateClassification": 61.73, - "DanishPoliticalCommentsClassification": 34.84, - "LccSentimentClassification": 51.4, - "MassiveIntentClassification (da)": 56.69, - "MassiveIntentClassification (nb)": 60.67, - "MassiveIntentClassification (sv)": 53.89, - "MassiveScenarioClassification (da)": 61.93, - "MassiveScenarioClassification (nb)": 67.31, - "MassiveScenarioClassification (sv)": 55.37, - "NoRecClassification": 51.32, - "NordicLangClassification": 84.69, - "NorwegianParliament": 57.41, - "ScalaDaClassification": 57.99, - "ScalaNbClassification": 62.25 + "Model": "sentence-croissant-llm-base", + "AmazonReviewsClassification (fr)": 34.79, + "MTOPDomainClassification (fr)": 85.52, + "MTOPIntentClassification (fr)": 63.12, + "MasakhaNEWSClassification (fra)": 79.29, + "MassiveIntentClassification (fr)": 59.41, + "MassiveScenarioClassification (fr)": 65.29 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "sentence-croissant-llm-base", + "AlloProfClusteringP2P": 64.12, + "AlloProfClusteringS2S": 32.52, + "HALClusteringS2S": 23.4, + "MLSUMClusteringP2P": 42.94, + "MLSUMClusteringS2S": 33.91, + "MasakhaNEWSClusteringP2P (fra)": 53.94, + "MasakhaNEWSClusteringS2S (fra)": 41.05 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "sentence-croissant-llm-base", + "OpusparcusPC (fr)": 91.42, + "PawsXPairClassification (fr)": 63.13 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "sentence-croissant-llm-base", + "AlloprofReranking": 53.0, + "SyntecReranking": 82.9 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "sentence-croissant-llm-base", + "AlloprofRetrieval": 29.97, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 21.31, + "SyntecRetrieval": 74.2, + "XPQARetrieval (fr)": 58.57 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "sentence-croissant-llm-base", + "SICKFr": 69.6, + "STS22 (fr)": 78.77, + "STSBenchmarkMultilingualSTS (fr)": 79.23 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "sentence-croissant-llm-base", + "SummEvalFr": 29.04 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "sentence-croissant-llm-base" + } + ] + } + }, + "text2vec-base-chinese": { + "BitextMining": { + "f1": [ + { + "Model": "text2vec-base-chinese" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text2vec-base-chinese", + "AmazonReviewsClassification (zh)": 34.12, + "IFlyTek": 42.05, + "JDReview": 82.14, + "MassiveIntentClassification (zh-CN)": 63.98, + "MassiveScenarioClassification (zh-CN)": 70.52, + "MultilingualSentiment": 60.98, + "OnlineShopping": 85.69, + "TNews": 
43.01, + "Waimai": 77.22 } ] }, "Clustering": { "v_measure": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese", + "CLSClusteringP2P": 35.27, + "CLSClusteringS2S": 32.42, + "ThuNewsClusteringP2P": 42.92, + "ThuNewsClusteringS2S": 40.01 } ] }, "PairClassification": { "ap": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese", + "Cmnli": 73.87, + "Ocnli": 60.95 } ] }, "Reranking": { "map": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese", + "CMedQAv1": 59.26, + "CMedQAv2": 59.82, + "MMarcoReranking": 12.76, + "T2Reranking": 65.95 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese", + "CmedqaRetrieval": 15.91, + "CovidRetrieval": 44.81, + "DuRetrieval": 52.23, + "EcomRetrieval": 34.6, + "MMarcoRetrieval": 44.06, + "MedicalRetrieval": 27.56, + "T2Retrieval": 51.67, + "VideoRetrieval": 39.52 } ] }, "STS": { "spearman": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese", + "AFQMC": 26.06, + "ATEC": 31.93, + "BQ": 42.67, + "LCQMC": 70.16, + "PAWSX": 17.21, + "QBQTC": 24.62, + "STS22 (zh)": 55.35, + "STSB": 79.3 } ] }, "Summarization": { "spearman": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "nb-bert-base" + "Model": "text2vec-base-chinese" } ] } }, - "xlm-roberta-large": { + "text-embedding-3-large-256": { "BitextMining": { "f1": [ { - "Model": "xlm-roberta-large" + "Model": "text-embedding-3-large-256" } ] }, "Classification": { "accuracy": [ { - "Model": "xlm-roberta-large", - "AmazonReviewsClassification (fr)": 26.62, - "MTOPDomainClassification (fr)": 36.77, - "MTOPIntentClassification (fr)": 15.37, - "MasakhaNEWSClassification (fra)": 65.76, - "MassiveIntentClassification (fr)": 15.82, - "MassiveScenarioClassification (fr)": 23.92 + "Model": "text-embedding-3-large-256", + "AmazonCounterfactualClassification (en)": 73.96, + "AmazonPolarityClassification": 91.32, + "AmazonReviewsClassification (en)": 46.03, + "Banking77Classification": 83.19, + "EmotionClassification": 45.8, + "ImdbClassification": 85.93, + "MTOPDomainClassification (en)": 92.76, + "MTOPIntentClassification (en)": 70.45, + "MassiveIntentClassification (en)": 71.12, + "MassiveScenarioClassification (en)": 75.56, + "ToxicConversationsClassification": 68.52, + "TweetSentimentExtractionClassification": 58.98 } ] }, "Clustering": { "v_measure": [ { - "Model": "xlm-roberta-large", - "AlloProfClusteringP2P": 56.54, - "AlloProfClusteringS2S": 21.18, - "BlurbsClusteringP2P": 29.84, - "BlurbsClusteringS2S": 7.29, - "HALClusteringS2S": 5.94, - "MLSUMClusteringP2P": 42.67, - "MLSUMClusteringS2S": 18.5, - "MasakhaNEWSClusteringP2P (fra)": 34.02, - "MasakhaNEWSClusteringS2S (fra)": 21.52, - "TenKGnadClusteringP2P": 32.46, - "TenKGnadClusteringS2S": 6.16 + "Model": "text-embedding-3-large-256", + "ArxivClusteringP2P": 47.05, + "ArxivClusteringS2S": 42.59, + "BiorxivClusteringP2P": 35.43, + "BiorxivClusteringS2S": 33.86, + "MedrxivClusteringP2P": 32.1, + "MedrxivClusteringS2S": 31.15, + "RedditClustering": 60.18, + "RedditClusteringP2P": 64.71, + "StackExchangeClustering": 71.23, + "StackExchangeClusteringP2P": 35.95, + "TwentyNewsgroupsClustering": 54.24 } ] }, "PairClassification": { "ap": [ { - "Model": "xlm-roberta-large", - "OpusparcusPC (fr)": 83.73, - "PawsXPairClassification (fr)": 53.38 + "Model": "text-embedding-3-large-256", + "SprintDuplicateQuestions": 89.02, + "TwitterSemEval2015": 76.56, + "TwitterURLCorpus": 87.09 } ] }, "Reranking": { "map": [ 
{ - "Model": "xlm-roberta-large", - "AlloprofReranking": 28.62, - "SyntecReranking": 49.4 + "Model": "text-embedding-3-large-256", + "AskUbuntuDupQuestions": 64.61, + "MindSmallReranking": 29.63, + "SciDocsRR": 84.25, + "StackOverflowDupQuestions": 53.46 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "xlm-roberta-large", - "AlloprofRetrieval": 0.52, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 0.9, - "SyntecRetrieval": 6.6, - "XPQARetrieval (fr)": 12.7 + "Model": "text-embedding-3-large-256", + "ArguAna": 55.6, + "CQADupstackRetrieval": 42.28, + "ClimateFEVER": 25.8, + "DBPedia": 40.8, + "FEVER": 84.57, + "FiQA2018": 50.33, + "HotpotQA": 62.69, + "MSMARCO": 37.93, + "NFCorpus": 37.94, + "NQ": 56.64, + "QuoraRetrieval": 88.22, + "SCIDOCS": 20.44, + "SciFact": 73.1, + "TRECCOVID": 76.24, + "Touche2020": 22.31 } ] }, "STS": { "spearman": [ { - "Model": "xlm-roberta-large", - "SICKFr": 50.01, - "STS22 (fr)": 55.49, - "STSBenchmarkMultilingualSTS (fr)": 42.32 + "Model": "text-embedding-3-large-256", + "BIOSSES": 84.87, + "SICK-R": 79.18, + "STS12": 71.98, + "STS13": 85.52, + "STS14": 80.5, + "STS15": 87.51, + "STS16": 84.48, + "STS17 (en-en)": 88.11, + "STS22 (en)": 65.92, + "STSBenchmark": 82.34 } ] }, "Summarization": { "spearman": [ { - "Model": "xlm-roberta-large", - "SummEvalFr": 28.89 + "Model": "text-embedding-3-large-256", + "SummEval": 29.92 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "xlm-roberta-large" + "Model": "text-embedding-3-large-256" } ] } }, - "text-similarity-curie-001": { + "flaubert_large_cased": { "BitextMining": { "f1": [ { - "Model": "text-similarity-curie-001" + "Model": "flaubert_large_cased" } ] }, "Classification": { "accuracy": [ { - "Model": "text-similarity-curie-001" + "Model": "flaubert_large_cased", + "AmazonReviewsClassification (fr)": 22.45, + "MTOPDomainClassification (fr)": 24.27, + "MTOPIntentClassification (fr)": 9.79, + "MasakhaNEWSClassification (fra)": 55.64, + "MassiveIntentClassification (fr)": 16.41, + "MassiveScenarioClassification (fr)": 22.72 } ] }, "Clustering": { "v_measure": [ { - "Model": "text-similarity-curie-001", - "RedditClustering": 40.79, - "StackExchangeClustering": 55.14, - "TwentyNewsgroupsClustering": 37.64 + "Model": "flaubert_large_cased", + "AlloProfClusteringP2P": 40.85, + "AlloProfClusteringS2S": 21.76, + "HALClusteringS2S": 5.26, + "MLSUMClusteringP2P": 38.09, + "MLSUMClusteringS2S": 18.71, + "MasakhaNEWSClusteringP2P (fra)": 26.43, + "MasakhaNEWSClusteringS2S (fra)": 24.68 } ] }, "PairClassification": { "ap": [ { - "Model": "text-similarity-curie-001", - "SprintDuplicateQuestions": 79.85, - "TwitterSemEval2015": 69.45, - "TwitterURLCorpus": 84.06 + "Model": "flaubert_large_cased", + "OpusparcusPC (fr)": 74.78, + "PawsXPairClassification (fr)": 54.14 } ] }, "Reranking": { "map": [ { - "Model": "text-similarity-curie-001", - "AskUbuntuDupQuestions": 55.09, - "SciDocsRR": 70.93, - "StackOverflowDupQuestions": 42.42 + "Model": "flaubert_large_cased", + "AlloprofReranking": 26.29, + "SyntecReranking": 42.8 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "text-similarity-curie-001", - "FiQA2018": 5.14, - "NFCorpus": 19.96, - "QuoraRetrieval": 83.11, - "SciFact": 46.68, - "TRECCOVID": 7.61 + "Model": "flaubert_large_cased", + "AlloprofRetrieval": 0.58, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 0.26, + "SyntecRetrieval": 1.58, + "XPQARetrieval (fr)": 3.69 } ] }, "STS": { "spearman": [ { - "Model": "text-similarity-curie-001", - "BIOSSES": 77.46, - "SICK-R": 77.26, - "STSBenchmark": 83.02 + 
"Model": "flaubert_large_cased", + "SICKFr": 34.6, + "STS22 (fr)": 48.52, + "STSBenchmarkMultilingualSTS (fr)": 15.66 } ] }, "Summarization": { "spearman": [ { - "Model": "text-similarity-curie-001" + "Model": "flaubert_large_cased", + "SummEvalFr": 29.25 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "text-similarity-curie-001" + "Model": "flaubert_large_cased" } ] } }, - "gbert-base": { + "flan-t5-base": { "BitextMining": { "f1": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "Classification": { "accuracy": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "Clustering": { "v_measure": [ { - "Model": "gbert-base", - "BlurbsClusteringP2P": 35.36, - "BlurbsClusteringS2S": 11.27, - "TenKGnadClusteringP2P": 37.16, - "TenKGnadClusteringS2S": 24.23 + "Model": "flan-t5-base" } ] }, "PairClassification": { "ap": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "Reranking": { "map": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "STS": { "spearman": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "Summarization": { "spearman": [ { - "Model": "gbert-base" + "Model": "flan-t5-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "gbert-base" + "Model": "flan-t5-base", + "Core17InstructionRetrieval": -3.31, + "News21InstructionRetrieval": -0.12, + "Robust04InstructionRetrieval": 5.35 } ] } }, - "nomic-embed-text-v1.5-64": { + "tart-dual-contriever-msmarco": { "BitextMining": { "f1": [ { - "Model": "nomic-embed-text-v1.5-64" + "Model": "tart-dual-contriever-msmarco" } ] }, "Classification": { "accuracy": [ { - "Model": "nomic-embed-text-v1.5-64", - "AmazonCounterfactualClassification (en)": 66.85, - "AmazonPolarityClassification": 85.92, - "AmazonReviewsClassification (en)": 41.02, - "Banking77Classification": 80.63, - "EmotionClassification": 40.55, - "ImdbClassification": 76.6, - "MTOPDomainClassification (en)": 86.31, - "MTOPIntentClassification (en)": 62.77, - "MassiveIntentClassification (en)": 64.95, - "MassiveScenarioClassification (en)": 70.38, - "ToxicConversationsClassification": 66.53, - "TweetSentimentExtractionClassification": 55.23 + "Model": "tart-dual-contriever-msmarco" } ] }, "Clustering": { "v_measure": [ { - "Model": "nomic-embed-text-v1.5-64", - "ArxivClusteringP2P": 41.8, - "ArxivClusteringS2S": 32.41, - "BiorxivClusteringP2P": 34.81, - "BiorxivClusteringS2S": 28.59, - "MedrxivClusteringP2P": 32.73, - "MedrxivClusteringS2S": 29.91, - "RedditClustering": 50.31, - "RedditClusteringP2P": 56.57, - "StackExchangeClustering": 57.99, - "StackExchangeClusteringP2P": 33.64, - "TwentyNewsgroupsClustering": 44.61 + "Model": "tart-dual-contriever-msmarco" } ] }, "PairClassification": { "ap": [ { - "Model": "nomic-embed-text-v1.5-64", - "SprintDuplicateQuestions": 90.06, - "TwitterSemEval2015": 71.68, - "TwitterURLCorpus": 85.03 + "Model": "tart-dual-contriever-msmarco" } ] }, "Reranking": { "map": [ { - "Model": "nomic-embed-text-v1.5-64", - "AskUbuntuDupQuestions": 60.79, - "MindSmallReranking": 29.7, - "SciDocsRR": 75.79, - "StackOverflowDupQuestions": 47.42 + "Model": "tart-dual-contriever-msmarco" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "nomic-embed-text-v1.5-64", - "ArguAna": 37.16, - "CQADupstackRetrieval": 28.72, - "ClimateFEVER": 31.48, - "DBPedia": 28.19, - "FEVER": 70.24, - "FiQA2018": 25.78, - "HotpotQA": 43.07, - "MSMARCO": 35.95, - "NFCorpus": 26.03, - "NQ": 45.54, - "QuoraRetrieval": 85.83, - 
"SCIDOCS": 12.09, - "SciFact": 52.71, - "TRECCOVID": 67.83, - "Touche2020": 23.13 + "Model": "tart-dual-contriever-msmarco" } ] }, "STS": { "spearman": [ { - "Model": "nomic-embed-text-v1.5-64", - "BIOSSES": 77.18, - "SICK-R": 78.76, - "STS12": 77.3, - "STS13": 84.18, - "STS14": 79.37, - "STS15": 84.69, - "STS16": 83.36, - "STS17 (en-en)": 85.73, - "STS22 (en)": 63.83, - "STSBenchmark": 83.46 + "Model": "tart-dual-contriever-msmarco" } ] }, "Summarization": { "spearman": [ { - "Model": "nomic-embed-text-v1.5-64", - "SummEval": 28.41 + "Model": "tart-dual-contriever-msmarco" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "nomic-embed-text-v1.5-64" + "Model": "tart-dual-contriever-msmarco", + "Core17InstructionRetrieval": -3.04, + "News21InstructionRetrieval": -2.98, + "Robust04InstructionRetrieval": -8.98 } ] } }, - "bert-base-swedish-cased": { + "all-MiniLM-L6-v2": { "BitextMining": { "f1": [ { - "Model": "bert-base-swedish-cased", - "BornholmBitextMining": 6.6 + "Model": "all-MiniLM-L6-v2", + "BornholmBitextMining": 29.68, + "BornholmBitextMining (dan-Latn)": 29.68, + "Tatoeba (kab-Latn_eng-Latn)": 0.96, + "Tatoeba (aze-Latn_eng-Latn)": 1.04, + "Tatoeba (wuu-Hans_eng-Latn)": 0.6, + "Tatoeba (fra-Latn_eng-Latn)": 8.17, + "Tatoeba (nov-Latn_eng-Latn)": 13.97, + "Tatoeba (slk-Latn_eng-Latn)": 3.27, + "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0, + "Tatoeba (ukr-Cyrl_eng-Latn)": 0.3, + "Tatoeba (kur-Latn_eng-Latn)": 5.21, + "Tatoeba (hin-Deva_eng-Latn)": 0.0, + "Tatoeba (tgl-Latn_eng-Latn)": 2.69, + "Tatoeba (jav-Latn_eng-Latn)": 3.37, + "Tatoeba (nob-Latn_eng-Latn)": 4.34, + "Tatoeba (tam-Taml_eng-Latn)": 0.33, + "Tatoeba (hsb-Latn_eng-Latn)": 2.65, + "Tatoeba (srp-Cyrl_eng-Latn)": 1.28, + "Tatoeba (cat-Latn_eng-Latn)": 6.93, + "Tatoeba (jpn-Jpan_eng-Latn)": 0.97, + "Tatoeba (kzj-Latn_eng-Latn)": 2.78, + "Tatoeba (uig-Arab_eng-Latn)": 0.2, + "Tatoeba (max-Deva_eng-Latn)": 6.93, + "Tatoeba (dtp-Latn_eng-Latn)": 1.88, + "Tatoeba (cbk-Latn_eng-Latn)": 7.04, + "Tatoeba (bre-Latn_eng-Latn)": 3.22, + "Tatoeba (arz-Arab_eng-Latn)": 0.0, + "Tatoeba (heb-Hebr_eng-Latn)": 0.22, + "Tatoeba (kat-Geor_eng-Latn)": 0.3, + "Tatoeba (yid-Hebr_eng-Latn)": 0.14, + "Tatoeba (lit-Latn_eng-Latn)": 0.92, + "Tatoeba (ber-Tfng_eng-Latn)": 4.69, + "Tatoeba (hun-Latn_eng-Latn)": 3.56, + "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0, + "Tatoeba (isl-Latn_eng-Latn)": 2.37, + "Tatoeba (ind-Latn_eng-Latn)": 3.86, + "Tatoeba (tuk-Latn_eng-Latn)": 3.52, + "Tatoeba (kor-Hang_eng-Latn)": 0.45, + "Tatoeba (ara-Arab_eng-Latn)": 0.0, + "Tatoeba (tzl-Latn_eng-Latn)": 4.58, + "Tatoeba (swe-Latn_eng-Latn)": 6.06, + "Tatoeba (ang-Latn_eng-Latn)": 15.64, + "Tatoeba (mon-Cyrl_eng-Latn)": 0.38, + "Tatoeba (urd-Arab_eng-Latn)": 0.1, + "Tatoeba (vie-Latn_eng-Latn)": 3.07, + "Tatoeba (ina-Latn_eng-Latn)": 17.63, + "Tatoeba (hrv-Latn_eng-Latn)": 3.83, + "Tatoeba (war-Latn_eng-Latn)": 4.94, + "Tatoeba (cor-Latn_eng-Latn)": 2.41, + "Tatoeba (tur-Latn_eng-Latn)": 3.59, + "Tatoeba (bul-Cyrl_eng-Latn)": 0.21, + "Tatoeba (spa-Latn_eng-Latn)": 5.63, + "Tatoeba (tel-Telu_eng-Latn)": 0.46, + "Tatoeba (nds-Latn_eng-Latn)": 9.56, + "Tatoeba (lvs-Latn_eng-Latn)": 2.61, + "Tatoeba (amh-Ethi_eng-Latn)": 0.25, + "Tatoeba (pms-Latn_eng-Latn)": 7.62, + "Tatoeba (xho-Latn_eng-Latn)": 4.01, + "Tatoeba (epo-Latn_eng-Latn)": 5.46, + "Tatoeba (por-Latn_eng-Latn)": 8.29, + "Tatoeba (ile-Latn_eng-Latn)": 13.54, + "Tatoeba (ell-Grek_eng-Latn)": 0.1, + "Tatoeba (oci-Latn_eng-Latn)": 6.55, + "Tatoeba (pes-Arab_eng-Latn)": 0.0, + "Tatoeba (tat-Cyrl_eng-Latn)": 0.44, + "Tatoeba 
(awa-Deva_eng-Latn)": 0.51, + "Tatoeba (fao-Latn_eng-Latn)": 5.33, + "Tatoeba (swg-Latn_eng-Latn)": 8.92, + "Tatoeba (uzb-Latn_eng-Latn)": 2.34, + "Tatoeba (cym-Latn_eng-Latn)": 6.09, + "Tatoeba (mar-Deva_eng-Latn)": 0.0, + "Tatoeba (fry-Latn_eng-Latn)": 11.22, + "Tatoeba (ces-Latn_eng-Latn)": 3.04, + "Tatoeba (afr-Latn_eng-Latn)": 5.89, + "Tatoeba (csb-Latn_eng-Latn)": 3.78, + "Tatoeba (pol-Latn_eng-Latn)": 2.58, + "Tatoeba (gla-Latn_eng-Latn)": 2.7, + "Tatoeba (deu-Latn_eng-Latn)": 7.89, + "Tatoeba (cmn-Hans_eng-Latn)": 1.92, + "Tatoeba (ita-Latn_eng-Latn)": 9.9, + "Tatoeba (ben-Beng_eng-Latn)": 0.0, + "Tatoeba (glg-Latn_eng-Latn)": 9.31, + "Tatoeba (dsb-Latn_eng-Latn)": 2.9, + "Tatoeba (pam-Latn_eng-Latn)": 3.54, + "Tatoeba (ast-Latn_eng-Latn)": 6.84, + "Tatoeba (bos-Latn_eng-Latn)": 5.58, + "Tatoeba (nld-Latn_eng-Latn)": 10.16, + "Tatoeba (bel-Cyrl_eng-Latn)": 0.5, + "Tatoeba (orv-Cyrl_eng-Latn)": 0.0, + "Tatoeba (gsw-Latn_eng-Latn)": 11.33, + "Tatoeba (dan-Latn_eng-Latn)": 7.84, + "Tatoeba (hye-Armn_eng-Latn)": 0.41, + "Tatoeba (mal-Mlym_eng-Latn)": 0.15, + "Tatoeba (arq-Arab_eng-Latn)": 0.11, + "Tatoeba (kaz-Cyrl_eng-Latn)": 0.42, + "Tatoeba (khm-Khmr_eng-Latn)": 0.42, + "Tatoeba (tha-Thai_eng-Latn)": 0.3, + "Tatoeba (swh-Latn_eng-Latn)": 5.8, + "Tatoeba (gle-Latn_eng-Latn)": 2.75, + "Tatoeba (ceb-Latn_eng-Latn)": 3.39, + "Tatoeba (sqi-Latn_eng-Latn)": 3.58, + "Tatoeba (slv-Latn_eng-Latn)": 3.25, + "Tatoeba (ido-Latn_eng-Latn)": 7.48, + "Tatoeba (yue-Hant_eng-Latn)": 0.86, + "Tatoeba (nno-Latn_eng-Latn)": 5.38, + "Tatoeba (est-Latn_eng-Latn)": 2.36, + "Tatoeba (lfn-Latn_eng-Latn)": 4.55, + "Tatoeba (lat-Latn_eng-Latn)": 5.04, + "Tatoeba (cha-Latn_eng-Latn)": 13.29, + "Tatoeba (eus-Latn_eng-Latn)": 5.54, + "Tatoeba (fin-Latn_eng-Latn)": 2.79, + "Tatoeba (rus-Cyrl_eng-Latn)": 0.07, + "Tatoeba (ron-Latn_eng-Latn)": 6.82, + "Tatoeba (zsm-Latn_eng-Latn)": 4.24 } ] }, "Classification": { "accuracy": [ { - "Model": "bert-base-swedish-cased", - "AngryTweetsClassification": 44.58, - "DKHateClassification": 55.53, - "DanishPoliticalCommentsClassification": 28.97, - "LccSentimentClassification": 41.2, - "MassiveIntentClassification (da)": 37.98, - "MassiveIntentClassification (nb)": 35.75, - "MassiveIntentClassification (sv)": 52.75, - "MassiveScenarioClassification (da)": 40.44, - "MassiveScenarioClassification (nb)": 35.76, - "MassiveScenarioClassification (sv)": 56.09, - "NoRecClassification": 43.91, - "NordicLangClassification": 62.45, - "NorwegianParliament": 57.56, - "ScalaDaClassification": 53.53, - "ScalaNbClassification": 53.63 + "Model": "all-MiniLM-L6-v2", + "AllegroReviews (pol-Latn)": 24.64, + "AmazonCounterfactualClassification (en)": 63.64, + "AmazonCounterfactualClassification (en-ext)": 65.59, + "AmazonCounterfactualClassification (deu-Latn)": 57.82, + "AmazonCounterfactualClassification (jpn-Jpan)": 60.9, + "AmazonPolarityClassification": 64.26, + "AmazonReviewsClassification (en)": 30.85, + "AmazonReviewsClassification (deu-Latn)": 26.44, + "AmazonReviewsClassification (spa-Latn)": 27.35, + "AmazonReviewsClassification (fra-Latn)": 26.88, + "AmazonReviewsClassification (jpn-Jpan)": 23.78, + "AmazonReviewsClassification (cmn-Hans)": 23.67, + "AngryTweetsClassification": 42.49, + "AngryTweetsClassification (dan-Latn)": 42.48, + "Banking77Classification": 80.04, + "CBD (pol-Latn)": 50.9, + "DKHateClassification": 55.05, + "DanishPoliticalCommentsClassification": 26.96, + "DanishPoliticalCommentsClassification (dan-Latn)": 26.7, + "EmotionClassification": 40.83, + 
"GeoreviewClassification (rus-Cyrl)": 27.08, + "HeadlineClassification (rus-Cyrl)": 27.77, + "IFlyTek (cmn-Hans)": 16.09, + "ImdbClassification": 61.76, + "InappropriatenessClassification (rus-Cyrl)": 51.73, + "JDReview (cmn-Hans)": 59.98, + "KinopoiskClassification (rus-Cyrl)": 33.93, + "LccSentimentClassification": 38.47, + "LccSentimentClassification (dan-Latn)": 38.53, + "MTOPDomainClassification (en)": 91.68, + "MTOPDomainClassification (deu-Latn)": 70.47, + "MTOPDomainClassification (spa-Latn)": 72.99, + "MTOPDomainClassification (fra-Latn)": 75.1, + "MTOPDomainClassification (hin-Deva)": 40.74, + "MTOPDomainClassification (tha-Thai)": 15.66, + "MTOPIntentClassification (en)": 61.55, + "MTOPIntentClassification (deu-Latn)": 45.7, + "MTOPIntentClassification (spa-Latn)": 44.19, + "MTOPIntentClassification (fra-Latn)": 39.67, + "MTOPIntentClassification (hin-Deva)": 18.69, + "MTOPIntentClassification (tha-Thai)": 5.78, + "MasakhaNEWSClassification (fra)": 74.05, + "MasakhaNEWSClassification (amh-Ethi)": 33.03, + "MasakhaNEWSClassification (eng)": 77.11, + "MasakhaNEWSClassification (fra-Latn)": 68.84, + "MasakhaNEWSClassification (hau-Latn)": 50.49, + "MasakhaNEWSClassification (ibo-Latn)": 52.15, + "MasakhaNEWSClassification (lin-Latn)": 68.29, + "MasakhaNEWSClassification (lug-Latn)": 47.58, + "MasakhaNEWSClassification (orm-Ethi)": 50.68, + "MasakhaNEWSClassification (pcm-Latn)": 92.56, + "MasakhaNEWSClassification (run-Latn)": 54.81, + "MasakhaNEWSClassification (sna-Latn)": 65.58, + "MasakhaNEWSClassification (som-Latn)": 39.8, + "MasakhaNEWSClassification (swa-Latn)": 47.25, + "MasakhaNEWSClassification (tir-Ethi)": 28.97, + "MasakhaNEWSClassification (xho-Latn)": 54.14, + "MasakhaNEWSClassification (yor-Latn)": 55.01, + "MassiveIntentClassification (en)": 66.94, + "MassiveIntentClassification (da)": 40.99, + "MassiveIntentClassification (nb)": 39.34, + "MassiveIntentClassification (sv)": 38.1, + "MassiveIntentClassification (aze-Latn)": 30.63, + "MassiveIntentClassification (spa-Latn)": 39.88, + "MassiveIntentClassification (tam-Taml)": 11.31, + "MassiveIntentClassification (swe-Latn)": 38.09, + "MassiveIntentClassification (fas-Arab)": 19.1, + "MassiveIntentClassification (khm-Khmr)": 4.89, + "MassiveIntentClassification (mon-Cyrl)": 20.35, + "MassiveIntentClassification (hye-Armn)": 7.62, + "MassiveIntentClassification (kan-Knda)": 3.14, + "MassiveIntentClassification (cmo-Hans)": 24.4, + "MassiveIntentClassification (rus-Cyrl)": 27.58, + "MassiveIntentClassification (jpn-Jpan)": 31.87, + "MassiveIntentClassification (deu-Latn)": 43.44, + "MassiveIntentClassification (ind-Latn)": 39.02, + "MassiveIntentClassification (cym-Latn)": 34.54, + "MassiveIntentClassification (nld-Latn)": 40.2, + "MassiveIntentClassification (hin-Deva)": 17.7, + "MassiveIntentClassification (afr-Latn)": 37.45, + "MassiveIntentClassification (ell-Grek)": 24.19, + "MassiveIntentClassification (mal-Mlym)": 2.87, + "MassiveIntentClassification (por-Latn)": 43.76, + "MassiveIntentClassification (sqi-Latn)": 40.7, + "MassiveIntentClassification (urd-Arab)": 14.42, + "MassiveIntentClassification (vie-Latn)": 37.09, + "MassiveIntentClassification (hun-Latn)": 35.69, + "MassiveIntentClassification (ron-Latn)": 40.54, + "MassiveIntentClassification (ara-Arab)": 19.05, + "MassiveIntentClassification (nob-Latn)": 39.36, + "MassiveIntentClassification (slv-Latn)": 36.7, + "MassiveIntentClassification (lav-Latn)": 36.97, + "MassiveIntentClassification (heb-Hebr)": 22.48, + "MassiveIntentClassification (pol-Latn)": 
36.07, + "MassiveIntentClassification (ita-Latn)": 41.59, + "MassiveIntentClassification (msa-Latn)": 35.07, + "MassiveIntentClassification (mya-Mymr)": 4.24, + "MassiveIntentClassification (isl-Latn)": 29.95, + "MassiveIntentClassification (tel-Telu)": 2.46, + "MassiveIntentClassification (swa-Latn)": 34.98, + "MassiveIntentClassification (amh-Ethi)": 2.62, + "MassiveIntentClassification (cmo-Hant)": 22.56, + "MassiveIntentClassification (tha-Thai)": 11.26, + "MassiveIntentClassification (ben-Beng)": 13.1, + "MassiveIntentClassification (fin-Latn)": 38.37, + "MassiveIntentClassification (fra-Latn)": 42.55, + "MassiveIntentClassification (kor-Kore)": 16.05, + "MassiveIntentClassification (kat-Geor)": 9.07, + "MassiveIntentClassification (dan-Latn)": 41.0, + "MassiveIntentClassification (tur-Latn)": 33.76, + "MassiveIntentClassification (tgl-Latn)": 37.92, + "MassiveIntentClassification (jav-Latn)": 35.91, + "MassiveScenarioClassification (en)": 73.81, + "MassiveScenarioClassification (da)": 47.01, + "MassiveScenarioClassification (nb)": 44.67, + "MassiveScenarioClassification (sv)": 42.93, + "MassiveScenarioClassification (mal-Mlym)": 7.67, + "MassiveScenarioClassification (khm-Khmr)": 9.25, + "MassiveScenarioClassification (deu-Latn)": 51.47, + "MassiveScenarioClassification (msa-Latn)": 43.67, + "MassiveScenarioClassification (heb-Hebr)": 24.01, + "MassiveScenarioClassification (mon-Cyrl)": 25.47, + "MassiveScenarioClassification (mya-Mymr)": 10.61, + "MassiveScenarioClassification (ind-Latn)": 43.46, + "MassiveScenarioClassification (nob-Latn)": 44.67, + "MassiveScenarioClassification (fra-Latn)": 51.14, + "MassiveScenarioClassification (tgl-Latn)": 45.69, + "MassiveScenarioClassification (amh-Ethi)": 7.57, + "MassiveScenarioClassification (fas-Arab)": 23.97, + "MassiveScenarioClassification (vie-Latn)": 40.47, + "MassiveScenarioClassification (sqi-Latn)": 47.21, + "MassiveScenarioClassification (dan-Latn)": 47.02, + "MassiveScenarioClassification (spa-Latn)": 49.0, + "MassiveScenarioClassification (pol-Latn)": 43.82, + "MassiveScenarioClassification (tel-Telu)": 7.95, + "MassiveScenarioClassification (tha-Thai)": 19.5, + "MassiveScenarioClassification (kor-Kore)": 20.3, + "MassiveScenarioClassification (cmo-Hans)": 33.65, + "MassiveScenarioClassification (urd-Arab)": 23.73, + "MassiveScenarioClassification (aze-Latn)": 35.59, + "MassiveScenarioClassification (ron-Latn)": 48.23, + "MassiveScenarioClassification (jav-Latn)": 43.59, + "MassiveScenarioClassification (slv-Latn)": 41.9, + "MassiveScenarioClassification (kat-Geor)": 14.92, + "MassiveScenarioClassification (lav-Latn)": 40.43, + "MassiveScenarioClassification (cym-Latn)": 39.0, + "MassiveScenarioClassification (swe-Latn)": 42.95, + "MassiveScenarioClassification (rus-Cyrl)": 30.46, + "MassiveScenarioClassification (ben-Beng)": 20.56, + "MassiveScenarioClassification (por-Latn)": 50.72, + "MassiveScenarioClassification (hye-Armn)": 13.03, + "MassiveScenarioClassification (jpn-Jpan)": 37.3, + "MassiveScenarioClassification (nld-Latn)": 48.43, + "MassiveScenarioClassification (swa-Latn)": 43.32, + "MassiveScenarioClassification (tam-Taml)": 17.37, + "MassiveScenarioClassification (isl-Latn)": 36.12, + "MassiveScenarioClassification (kan-Knda)": 7.85, + "MassiveScenarioClassification (ell-Grek)": 31.3, + "MassiveScenarioClassification (tur-Latn)": 38.85, + "MassiveScenarioClassification (cmo-Hant)": 31.18, + "MassiveScenarioClassification (fin-Latn)": 42.38, + "MassiveScenarioClassification (hin-Deva)": 23.71, + 
"MassiveScenarioClassification (ara-Arab)": 25.99, + "MassiveScenarioClassification (hun-Latn)": 41.61, + "MassiveScenarioClassification (afr-Latn)": 43.87, + "MassiveScenarioClassification (ita-Latn)": 49.8, + "MultilingualSentiment (cmn-Hans)": 41.28, + "NoRecClassification": 40.02, + "NoRecClassification (nob-Latn)": 37.93, + "NordicLangClassification": 54.71, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.7, + "NorwegianParliament": 54.8, + "OnlineShopping (cmn-Hans)": 57.74, + "PAC (pol-Latn)": 59.78, + "PolEmo2.0-IN (pol-Latn)": 40.29, + "PolEmo2.0-OUT (pol-Latn)": 25.0, + "RuReviewsClassification (rus-Cyrl)": 41.79, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.08, + "RuSciBenchOECDClassification (rus-Cyrl)": 8.3, + "ScalaDaClassification": 50.03, + "ScalaNbClassification": 50.17, + "TNews (cmn-Hans)": 20.12, + "ToxicConversationsClassification": 62.09, + "TweetSentimentExtractionClassification": 54.04, + "Waimai (cmn-Hans)": 62.72 } ] }, "Clustering": { "v_measure": [ { - "Model": "bert-base-swedish-cased" + "Model": "all-MiniLM-L6-v2", + "AlloProfClusteringP2P": 51.83, + "AlloProfClusteringS2S": 32.07, + "ArxivClusteringP2P": 46.55, + "ArxivClusteringS2S": 37.86, + "BiorxivClusteringP2P": 38.37, + "BiorxivClusteringS2S": 32.88, + "GeoreviewClusteringP2P (rus-Cyrl)": 20.25, + "HALClusteringS2S": 18.84, + "MLSUMClusteringP2P": 36.74, + "MLSUMClusteringP2P (rus-Cyrl)": 23.91, + "MLSUMClusteringS2S": 28.12, + "MLSUMClusteringS2S (rus-Cyrl)": 19.07, + "MasakhaNEWSClusteringP2P (fra)": 34.92, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 43.85, + "MasakhaNEWSClusteringP2P (eng)": 48.88, + "MasakhaNEWSClusteringP2P (fra-Latn)": 34.92, + "MasakhaNEWSClusteringP2P (hau-Latn)": 24.77, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 45.94, + "MasakhaNEWSClusteringP2P (lin-Latn)": 69.56, + "MasakhaNEWSClusteringP2P (lug-Latn)": 49.4, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 25.34, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 85.57, + "MasakhaNEWSClusteringP2P (run-Latn)": 50.75, + "MasakhaNEWSClusteringP2P (sna-Latn)": 41.68, + "MasakhaNEWSClusteringP2P (som-Latn)": 29.02, + "MasakhaNEWSClusteringP2P (swa-Latn)": 21.87, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.93, + "MasakhaNEWSClusteringP2P (xho-Latn)": 28.58, + "MasakhaNEWSClusteringP2P (yor-Latn)": 31.45, + "MasakhaNEWSClusteringS2S (fra)": 40.58, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 45.44, + "MasakhaNEWSClusteringS2S (eng)": 41.09, + "MasakhaNEWSClusteringS2S (fra-Latn)": 40.58, + "MasakhaNEWSClusteringS2S (hau-Latn)": 15.42, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 37.02, + "MasakhaNEWSClusteringS2S (lin-Latn)": 65.14, + "MasakhaNEWSClusteringS2S (lug-Latn)": 44.21, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.79, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 61.48, + "MasakhaNEWSClusteringS2S (run-Latn)": 51.25, + "MasakhaNEWSClusteringS2S (sna-Latn)": 42.74, + "MasakhaNEWSClusteringS2S (som-Latn)": 30.08, + "MasakhaNEWSClusteringS2S (swa-Latn)": 9.55, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 46.04, + "MasakhaNEWSClusteringS2S (xho-Latn)": 27.08, + "MasakhaNEWSClusteringS2S (yor-Latn)": 31.04, + "MedrxivClusteringP2P": 34.39, + "MedrxivClusteringS2S": 31.86, + "RedditClustering": 50.7, + "RedditClusteringP2P": 54.8, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.21, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 9.43, + "StackExchangeClustering": 53.14, + "StackExchangeClusteringP2P": 34.26, + "TwentyNewsgroupsClustering": 46.49 } ] }, "PairClassification": { "ap": [ { - "Model": "bert-base-swedish-cased" + 
"Model": "all-MiniLM-L6-v2", + "CDSC-E (pol-Latn)": 47.27, + "OpusparcusPC (fr)": 86.53, + "OpusparcusPC (deu-Latn)": 89.91, + "OpusparcusPC (en)": 97.46, + "OpusparcusPC (fin-Latn)": 85.44, + "OpusparcusPC (fra-Latn)": 86.53, + "OpusparcusPC (rus-Cyrl)": 79.28, + "OpusparcusPC (swe-Latn)": 83.78, + "PSC (pol-Latn)": 81.87, + "PawsXPairClassification (fr)": 55.4, + "PawsXPairClassification (deu-Latn)": 51.22, + "PawsXPairClassification (en)": 59.1, + "PawsXPairClassification (spa-Latn)": 52.21, + "PawsXPairClassification (fra-Latn)": 55.41, + "PawsXPairClassification (jpn-Hira)": 48.97, + "PawsXPairClassification (kor-Hang)": 50.53, + "PawsXPairClassification (cmn-Hans)": 53.11, + "SICK-E-PL (pol-Latn)": 47.32, + "SprintDuplicateQuestions": 94.55, + "TERRa (rus-Cyrl)": 45.03, + "TwitterSemEval2015": 67.86, + "TwitterURLCorpus": 84.7 } ] }, "Reranking": { "map": [ { - "Model": "bert-base-swedish-cased" + "Model": "all-MiniLM-L6-v2", + "AlloprofReranking": 31.69, + "AlloprofReranking (fra-Latn)": 62.62, + "AskUbuntuDupQuestions": 63.48, + "MMarcoReranking (cmn-Hans)": 4.74, + "MindSmallReranking": 30.8, + "RuBQReranking (rus-Cyrl)": 27.05, + "SciDocsRR": 87.12, + "StackOverflowDupQuestions": 50.76, + "SyntecReranking": 59.57, + "SyntecReranking (fra-Latn)": 67.31, + "T2Reranking (cmn-Hans)": 56.26 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bert-base-swedish-cased" + "Model": "all-MiniLM-L6-v2", + "AILACasedocs": 19.72, + "AILAStatutes": 20.52, + "ARCChallenge": 9.48, + "AlloprofRetrieval": 28.41, + "AlloprofRetrieval (fra-Latn)": 28.41, + "AlphaNLI": 28.19, + "ArguAna": 50.17, + "ArguAna-PL (pol-Latn)": 11.5, + "BSARDRetrieval": 0.0, + "BSARDRetrieval (fra-Latn)": 4.8, + "CQADupstackRetrieval": 41.32, + "ClimateFEVER": 20.27, + "CmedqaRetrieval (cmn-Hans)": 2.03, + "CovidRetrieval (cmn-Hans)": 0.8, + "DBPedia": 32.33, + "DuRetrieval (cmn-Hans)": 3.03, + "EcomRetrieval (cmn-Hans)": 3.7, + "FEVER": 51.93, + "FiQA-PL (pol-Latn)": 2.29, + "FiQA2018": 36.87, + "GerDaLIRSmall (deu-Latn)": 2.41, + "HellaSwag": 24.21, + "HotpotQA": 46.51, + "LEMBNarrativeQARetrieval": 18.27, + "LEMBNeedleRetrieval": 20.0, + "LEMBPasskeyRetrieval": 23.25, + "LEMBQMSumRetrieval": 16.32, + "LEMBSummScreenFDRetrieval": 54.8, + "LEMBWikimQARetrieval": 46.23, + "LeCaRDv2 (zho-Hans)": 17.5, + "LegalBenchConsumerContractsQA": 65.6, + "LegalBenchCorporateLobbying": 86.41, + "LegalQuAD (deu-Latn)": 11.81, + "LegalSummarization": 59.0, + "MMarcoRetrieval (cmn-Hans)": 6.21, + "MSMARCO": 36.54, + "MedicalRetrieval (cmn-Hans)": 1.76, + "MintakaRetrieval (fr)": 9.19, + "MintakaRetrieval (ara-Arab)": 2.22, + "MintakaRetrieval (deu-Latn)": 15.43, + "MintakaRetrieval (spa-Latn)": 7.72, + "MintakaRetrieval (fra-Latn)": 9.19, + "MintakaRetrieval (hin-Deva)": 2.65, + "MintakaRetrieval (ita-Latn)": 8.48, + "MintakaRetrieval (jpn-Hira)": 6.7, + "MintakaRetrieval (por-Latn)": 9.76, + "NFCorpus": 31.59, + "NFCorpus-PL (pol-Latn)": 10.62, + "NQ": 43.87, + "PIQA": 25.28, + "Quail": 3.92, + "QuoraRetrieval": 87.56, + "RARbCode": 44.27, + "RARbMath": 68.19, + "RiaNewsRetrieval (rus-Cyrl)": 0.67, + "RuBQRetrieval (rus-Cyrl)": 2.64, + "SCIDOCS": 21.64, + "SCIDOCS-PL (pol-Latn)": 3.75, + "SIQA": 1.56, + "SciFact": 64.51, + "SciFact-PL (pol-Latn)": 16.14, + "SpartQA": 1.65, + "SyntecRetrieval": 60.15, + "SyntecRetrieval (fra-Latn)": 60.15, + "T2Retrieval (cmn-Hans)": 1.6, + "TRECCOVID": 47.25, + "TRECCOVID-PL (pol-Latn)": 8.66, + "TempReasonL1": 1.53, + "TempReasonL2Fact": 17.65, + "TempReasonL2Pure": 0.46, + "TempReasonL3Fact": 14.16, + 
"TempReasonL3Pure": 6.33, + "Touche2020": 16.9, + "VideoRetrieval (cmn-Hans)": 9.79, + "WinoGrande": 47.33, + "XPQARetrieval (fr)": 51.79, + "XPQARetrieval (ara-Arab_ara-Arab)": 8.03, + "XPQARetrieval (eng-Latn_ara-Arab)": 1.86, + "XPQARetrieval (ara-Arab_eng-Latn)": 6.87, + "XPQARetrieval (deu-Latn_deu-Latn)": 53.25, + "XPQARetrieval (eng-Latn_deu-Latn)": 10.99, + "XPQARetrieval (deu-Latn_eng-Latn)": 27.59, + "XPQARetrieval (spa-Latn_spa-Latn)": 38.87, + "XPQARetrieval (eng-Latn_spa-Latn)": 5.46, + "XPQARetrieval (spa-Latn_eng-Latn)": 22.2, + "XPQARetrieval (fra-Latn_fra-Latn)": 51.79, + "XPQARetrieval (eng-Latn_fra-Latn)": 8.57, + "XPQARetrieval (fra-Latn_eng-Latn)": 31.36, + "XPQARetrieval (hin-Deva_hin-Deva)": 35.3, + "XPQARetrieval (eng-Latn_hin-Deva)": 6.28, + "XPQARetrieval (hin-Deva_eng-Latn)": 6.0, + "XPQARetrieval (ita-Latn_ita-Latn)": 54.57, + "XPQARetrieval (eng-Latn_ita-Latn)": 6.79, + "XPQARetrieval (ita-Latn_eng-Latn)": 24.13, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.23, + "XPQARetrieval (eng-Latn_jpn-Hira)": 4.1, + "XPQARetrieval (jpn-Hira_eng-Latn)": 13.05, + "XPQARetrieval (kor-Hang_kor-Hang)": 10.24, + "XPQARetrieval (eng-Latn_kor-Hang)": 5.72, + "XPQARetrieval (kor-Hang_eng-Latn)": 6.37, + "XPQARetrieval (pol-Latn_pol-Latn)": 22.33, + "XPQARetrieval (eng-Latn_pol-Latn)": 7.58, + "XPQARetrieval (pol-Latn_eng-Latn)": 14.43, + "XPQARetrieval (por-Latn_por-Latn)": 31.93, + "XPQARetrieval (eng-Latn_por-Latn)": 5.9, + "XPQARetrieval (por-Latn_eng-Latn)": 20.74, + "XPQARetrieval (tam-Taml_tam-Taml)": 7.43, + "XPQARetrieval (eng-Latn_tam-Taml)": 3.42, + "XPQARetrieval (tam-Taml_eng-Latn)": 2.91, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 19.39, + "XPQARetrieval (eng-Latn_cmn-Hans)": 5.05, + "XPQARetrieval (cmn-Hans_eng-Latn)": 8.77 } ] }, "STS": { "spearman": [ { - "Model": "bert-base-swedish-cased" + "Model": "all-MiniLM-L6-v2", + "AFQMC (cmn-Hans)": 8.59, + "ATEC (cmn-Hans)": 13.52, + "BIOSSES": 81.64, + "BQ (cmn-Hans)": 23.84, + "CDSC-R (pol-Latn)": 79.45, + "LCQMC (cmn-Hans)": 23.85, + "PAWSX (cmn-Hans)": 7.21, + "RUParaPhraserSTS (rus-Cyrl)": 43.93, + "RuSTSBenchmarkSTS (rus-Cyrl)": 55.56, + "SICK-R": 77.58, + "SICK-R-PL (pol-Latn)": 52.43, + "SICKFr": 62.48, + "SICKFr (fra-Latn)": 62.48, + "STS12": 72.37, + "STS13": 80.6, + "STS14": 75.59, + "STS15": 85.39, + "STS16": 78.99, + "STS17 (ar-ar)": 50.89, + "STS17 (en-ar)": -4.28, + "STS17 (en-de)": 35.82, + "STS17 (en-en)": 87.59, + "STS17 (en-tr)": 4.5, + "STS17 (es-en)": 16.31, + "STS17 (es-es)": 76.12, + "STS17 (fr-en)": 37.09, + "STS17 (it-en)": 24.45, + "STS17 (ko-ko)": 43.39, + "STS17 (nl-en)": 29.0, + "STS17 (ara-Arab)": 50.89, + "STS17 (spa-Latn_eng-Latn)": 16.31, + "STS17 (kor-Hang)": 43.39, + "STS17 (eng-Latn_tur-Latn)": 4.5, + "STS17 (fra-Latn_eng-Latn)": 37.09, + "STS17 (nld-Latn_eng-Latn)": 29.0, + "STS17 (eng-Latn_ara-Arab)": -4.28, + "STS17 (spa-Latn)": 76.12, + "STS17 (eng-Latn_deu-Latn)": 35.82, + "STS17 (ita-Latn_eng-Latn)": 24.45, + "STS22 (ar)": 22.64, + "STS22 (de)": 31.04, + "STS22 (de-en)": 44.04, + "STS22 (de-fr)": 30.07, + "STS22 (de-pl)": 4.93, + "STS22 (en)": 67.71, + "STS22 (es)": 54.78, + "STS22 (es-en)": 53.42, + "STS22 (es-it)": 44.27, + "STS22 (fr)": 77.0, + "STS22 (fr-pl)": 50.71, + "STS22 (it)": 60.4, + "STS22 (pl)": 26.77, + "STS22 (pl-en)": 32.8, + "STS22 (ru)": 14.72, + "STS22 (tr)": 33.69, + "STS22 (zh)": 44.93, + "STS22 (zh-en)": 41.64, + "STS22 (tur-Latn)": 33.69, + "STS22 (spa-Latn)": 54.78, + "STS22 (ara-Arab)": 22.64, + "STS22 (deu-Latn_pol-Latn)": -4.93, + "STS22 (spa-Latn_eng-Latn)": 
53.42, + "STS22 (cmn-Hans_eng-Latn)": 41.64, + "STS22 (rus-Cyrl)": 14.72, + "STS22 (spa-Latn_ita-Latn)": 44.27, + "STS22 (deu-Latn_fra-Latn)": 30.07, + "STS22 (deu-Latn)": 31.04, + "STS22 (fra-Latn_pol-Latn)": 50.71, + "STS22 (pol-Latn)": 26.77, + "STS22 (pol-Latn_eng-Latn)": 32.8, + "STS22 (deu-Latn_eng-Latn)": 44.04, + "STS22 (ita-Latn)": 60.4, + "STS22 (fra-Latn)": 77.0, + "STS22 (cmn-Hans)": 44.93, + "STSB (cmn-Hans)": 37.8, + "STSBenchmark": 82.03, + "STSBenchmarkMultilingualSTS (fr)": 64.93, + "STSBenchmarkMultilingualSTS (pol-Latn)": 56.42, + "STSBenchmarkMultilingualSTS (por-Latn)": 61.56, + "STSBenchmarkMultilingualSTS (ita-Latn)": 59.24, + "STSBenchmarkMultilingualSTS (fra-Latn)": 64.93, + "STSBenchmarkMultilingualSTS (deu-Latn)": 62.4, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.74, + "STSBenchmarkMultilingualSTS (spa-Latn)": 61.62, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.55, + "STSBenchmarkMultilingualSTS (en)": 82.03, + "STSBenchmarkMultilingualSTS (nld-Latn)": 55.46 } ] }, "Summarization": { "spearman": [ { - "Model": "bert-base-swedish-cased" + "Model": "all-MiniLM-L6-v2", + "SummEval": 30.81, + "SummEvalFr": 28.28, + "SummEvalFr (fra-Latn)": 28.29 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bert-base-swedish-cased" + "Model": "all-MiniLM-L6-v2" } ] } }, - "e5-small": { + "text-embedding-3-small": { "BitextMining": { "f1": [ { - "Model": "e5-small", - "BornholmBitextMining": 40.27 + "Model": "text-embedding-3-small" } ] }, "Classification": { "accuracy": [ { - "Model": "e5-small", - "AngryTweetsClassification": 43.6, - "DKHateClassification": 57.57, - "DanishPoliticalCommentsClassification": 28.37, - "LccSentimentClassification": 40.27, - "MassiveIntentClassification (da)": 41.89, - "MassiveIntentClassification (nb)": 40.25, - "MassiveIntentClassification (sv)": 40.07, - "MassiveScenarioClassification (da)": 49.93, - "MassiveScenarioClassification (nb)": 48.58, - "MassiveScenarioClassification (sv)": 47.06, - "NoRecClassification": 41.84, - "NordicLangClassification": 53.47, - "NorwegianParliament": 56.57, - "ScalaDaClassification": 50.15, - "ScalaNbClassification": 50.03 + "Model": "text-embedding-3-small", + "AmazonCounterfactualClassification (en)": 76.42, + "AmazonPolarityClassification": 90.84, + "AmazonReviewsClassification (en)": 45.73, + "Banking77Classification": 83.01, + "EmotionClassification": 50.63, + "ImdbClassification": 83.66, + "MTOPDomainClassification (en)": 93.91, + "MTOPIntentClassification (en)": 70.98, + "MassiveIntentClassification (en)": 72.86, + "MassiveScenarioClassification (en)": 76.84, + "ToxicConversationsClassification": 71.91, + "TweetSentimentExtractionClassification": 61.72 } ] }, "Clustering": { "v_measure": [ { - "Model": "e5-small", - "BiorxivClusteringP2P": 36.1, - "BiorxivClusteringS2S": 31.51, - "MedrxivClusteringP2P": 31.31, - "MedrxivClusteringS2S": 28.32, - "RedditClustering": 43.27, - "RedditClusteringP2P": 57.22, - "StackExchangeClustering": 59.6, - "StackExchangeClusteringP2P": 30.82, - "TwentyNewsgroupsClustering": 37.65 + "Model": "text-embedding-3-small", + "ArxivClusteringP2P": 46.57, + "ArxivClusteringS2S": 39.35, + "BiorxivClusteringP2P": 37.77, + "BiorxivClusteringS2S": 34.68, + "MedrxivClusteringP2P": 32.77, + "MedrxivClusteringS2S": 31.85, + "RedditClustering": 64.09, + "RedditClusteringP2P": 65.12, + "StackExchangeClustering": 72.05, + "StackExchangeClusteringP2P": 34.04, + "TwentyNewsgroupsClustering": 54.81 } ] }, "PairClassification": { "ap": [ { - "Model": "e5-small" + "Model": 
"text-embedding-3-small", + "OpusparcusPC (fr)": 94.45, + "SprintDuplicateQuestions": 94.58, + "TwitterSemEval2015": 73.33, + "TwitterURLCorpus": 87.21 } ] }, "Reranking": { "map": [ { - "Model": "e5-small" + "Model": "text-embedding-3-small", + "AskUbuntuDupQuestions": 62.18, + "MindSmallReranking": 29.93, + "SciDocsRR": 83.25, + "StackOverflowDupQuestions": 51.53 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "e5-small" + "Model": "text-embedding-3-small", + "ARCChallenge": 14.63, + "AlphaNLI": 30.61, + "ArguAna": 55.49, + "CQADupstackRetrieval": 42.58, + "ClimateFEVER": 26.86, + "DBPedia": 39.97, + "FEVER": 79.42, + "FiQA2018": 44.91, + "HellaSwag": 30.94, + "HotpotQA": 63.63, + "MSMARCO": 37.02, + "NFCorpus": 38.33, + "NQ": 52.86, + "PIQA": 33.69, + "Quail": 6.11, + "QuoraRetrieval": 88.83, + "RARbCode": 72.03, + "RARbMath": 71.07, + "SCIDOCS": 20.8, + "SIQA": 3.03, + "SciFact": 73.37, + "SpartQA": 6.63, + "TRECCOVID": 77.9, + "TempReasonL1": 2.35, + "TempReasonL2Fact": 25.68, + "TempReasonL2Pure": 2.76, + "TempReasonL3Fact": 22.09, + "TempReasonL3Pure": 9.79, + "Touche2020": 24.28, + "WinoGrande": 31.53 } ] }, "STS": { "spearman": [ { - "Model": "e5-small" + "Model": "text-embedding-3-small", + "BIOSSES": 88.72, + "SICK-R": 76.73, + "STS12": 73.09, + "STS13": 84.92, + "STS14": 79.81, + "STS15": 88.01, + "STS16": 84.41, + "STS17 (en-en)": 90.94, + "STS22 (en)": 64.96, + "STSBenchmark": 84.24 } ] }, "Summarization": { "spearman": [ { - "Model": "e5-small" + "Model": "text-embedding-3-small", + "SummEval": 31.12 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "e5-small" + "Model": "text-embedding-3-small" } ] } }, - "universal-sentence-encoder-multilingual-large-3": { + "monot5-3b-msmarco-10k": { "BitextMining": { "f1": [ { - "Model": "universal-sentence-encoder-multilingual-large-3" + "Model": "monot5-3b-msmarco-10k" } ] }, "Classification": { "accuracy": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "AmazonReviewsClassification (fr)": 35.09, - "MTOPDomainClassification (fr)": 88.19, - "MTOPIntentClassification (fr)": 63.64, - "MasakhaNEWSClassification (fra)": 72.04, - "MassiveIntentClassification (fr)": 65.8, - "MassiveScenarioClassification (fr)": 73.47 + "Model": "monot5-3b-msmarco-10k" } ] }, "Clustering": { "v_measure": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "AlloProfClusteringP2P": 54.21, - "AlloProfClusteringS2S": 37.95, - "HALClusteringS2S": 18.94, - "MLSUMClusteringP2P": 41.02, - "MLSUMClusteringS2S": 37.97, - "MasakhaNEWSClusteringP2P (fra)": 24.09, - "MasakhaNEWSClusteringS2S (fra)": 40.24 + "Model": "monot5-3b-msmarco-10k" } ] }, "PairClassification": { "ap": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "OpusparcusPC (fr)": 93.38, - "PawsXPairClassification (fr)": 53.62 + "Model": "monot5-3b-msmarco-10k" } ] }, "Reranking": { "map": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "AlloprofReranking": 55.39, - "SyntecReranking": 77.13 + "Model": "monot5-3b-msmarco-10k" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "AlloprofRetrieval": 33.78, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 26.21, - "SyntecRetrieval": 63.69, - "XPQARetrieval (fr)": 65.21 + "Model": "monot5-3b-msmarco-10k" } ] }, "STS": { "spearman": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "SICKFr": 74.39, - "STS22 (fr)": 71.11, - "STSBenchmarkMultilingualSTS (fr)": 78.16 + "Model": "monot5-3b-msmarco-10k" } ] }, 
"Summarization": { "spearman": [ { - "Model": "universal-sentence-encoder-multilingual-large-3", - "SummEvalFr": 28.56 + "Model": "monot5-3b-msmarco-10k" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "universal-sentence-encoder-multilingual-large-3" + "Model": "monot5-3b-msmarco-10k", + "Core17InstructionRetrieval": 1.84, + "News21InstructionRetrieval": 1.78, + "Robust04InstructionRetrieval": 3.96 } ] } }, - "multilingual-e5-small": { + "nomic-embed-text-v1.5-64": { "BitextMining": { "f1": [ { - "Model": "multilingual-e5-small", - "BornholmBitextMining (dan-Latn)": 37.15, - "BornholmBitextMining": 43.89, - "Tatoeba (swh-Latn_eng-Latn)": 65.43, - "Tatoeba (jpn-Jpan_eng-Latn)": 77.43, - "Tatoeba (tuk-Latn_eng-Latn)": 16.99, - "Tatoeba (lat-Latn_eng-Latn)": 37.76, - "Tatoeba (mal-Mlym_eng-Latn)": 94.78, - "Tatoeba (ast-Latn_eng-Latn)": 62.81, - "Tatoeba (est-Latn_eng-Latn)": 56.47, - "Tatoeba (cym-Latn_eng-Latn)": 62.3, - "Tatoeba (pol-Latn_eng-Latn)": 88.85, - "Tatoeba (ukr-Cyrl_eng-Latn)": 82.98, - "Tatoeba (ido-Latn_eng-Latn)": 70.07, - "Tatoeba (zsm-Latn_eng-Latn)": 91.37, - "Tatoeba (bul-Cyrl_eng-Latn)": 85.47, - "Tatoeba (dsb-Latn_eng-Latn)": 29.87, - "Tatoeba (tha-Thai_eng-Latn)": 90.88, - "Tatoeba (arz-Arab_eng-Latn)": 53.35, - "Tatoeba (cbk-Latn_eng-Latn)": 55.36, - "Tatoeba (pms-Latn_eng-Latn)": 35.47, - "Tatoeba (ber-Tfng_eng-Latn)": 18.22, - "Tatoeba (slk-Latn_eng-Latn)": 79.86, - "Tatoeba (ang-Latn_eng-Latn)": 30.3, - "Tatoeba (ind-Latn_eng-Latn)": 88.28, - "Tatoeba (cha-Latn_eng-Latn)": 24.88, - "Tatoeba (slv-Latn_eng-Latn)": 73.93, - "Tatoeba (kab-Latn_eng-Latn)": 18.06, - "Tatoeba (ina-Latn_eng-Latn)": 86.39, - "Tatoeba (lfn-Latn_eng-Latn)": 51.46, - "Tatoeba (hye-Armn_eng-Latn)": 83.81, - "Tatoeba (war-Latn_eng-Latn)": 39.14, - "Tatoeba (dtp-Latn_eng-Latn)": 6.42, - "Tatoeba (nds-Latn_eng-Latn)": 52.46, - "Tatoeba (urd-Arab_eng-Latn)": 85.07, - "Tatoeba (rus-Cyrl_eng-Latn)": 89.77, - "Tatoeba (fao-Latn_eng-Latn)": 56.57, - "Tatoeba (cat-Latn_eng-Latn)": 79.3, - "Tatoeba (gla-Latn_eng-Latn)": 35.96, - "Tatoeba (kur-Latn_eng-Latn)": 39.99, - "Tatoeba (cor-Latn_eng-Latn)": 5.24, - "Tatoeba (nov-Latn_eng-Latn)": 64.2, - "Tatoeba (max-Deva_eng-Latn)": 48.29, - "Tatoeba (nno-Latn_eng-Latn)": 70.29, - "Tatoeba (kor-Hang_eng-Latn)": 73.74, - "Tatoeba (vie-Latn_eng-Latn)": 89.03, - "Tatoeba (tur-Latn_eng-Latn)": 88.42, - "Tatoeba (spa-Latn_eng-Latn)": 93.01, - "Tatoeba (gsw-Latn_eng-Latn)": 40.13, - "Tatoeba (yid-Hebr_eng-Latn)": 65.9, - "Tatoeba (orv-Cyrl_eng-Latn)": 14.89, - "Tatoeba (wuu-Hans_eng-Latn)": 67.3, - "Tatoeba (heb-Hebr_eng-Latn)": 73.68, - "Tatoeba (arq-Arab_eng-Latn)": 23.62, - "Tatoeba (nld-Latn_eng-Latn)": 91.87, - "Tatoeba (kaz-Cyrl_eng-Latn)": 70.57, - "Tatoeba (mon-Cyrl_eng-Latn)": 77.7, - "Tatoeba (fin-Latn_eng-Latn)": 70.23, - "Tatoeba (hrv-Latn_eng-Latn)": 84.42, - "Tatoeba (fra-Latn_eng-Latn)": 90.51, - "Tatoeba (khm-Khmr_eng-Latn)": 44.34, - "Tatoeba (amh-Ethi_eng-Latn)": 74.11, - "Tatoeba (eus-Latn_eng-Latn)": 50.9, - "Tatoeba (lvs-Latn_eng-Latn)": 61.84, - "Tatoeba (pes-Arab_eng-Latn)": 85.51, - "Tatoeba (tzl-Latn_eng-Latn)": 34.83, - "Tatoeba (oci-Latn_eng-Latn)": 38.27, - "Tatoeba (ell-Grek_eng-Latn)": 86.81, - "Tatoeba (tgl-Latn_eng-Latn)": 77.54, - "Tatoeba (uig-Arab_eng-Latn)": 60.59, - "Tatoeba (ben-Beng_eng-Latn)": 81.4, - "Tatoeba (uzb-Latn_eng-Latn)": 59.11, - "Tatoeba (epo-Latn_eng-Latn)": 88.96, - "Tatoeba (sqi-Latn_eng-Latn)": 86.21, - "Tatoeba (kzj-Latn_eng-Latn)": 6.56, - "Tatoeba (mkd-Cyrl_eng-Latn)": 63.74, - "Tatoeba 
(bre-Latn_eng-Latn)": 7.09, - "Tatoeba (dan-Latn_eng-Latn)": 86.38, - "Tatoeba (mhr-Cyrl_eng-Latn)": 5.58, - "Tatoeba (csb-Latn_eng-Latn)": 26.23, - "Tatoeba (xho-Latn_eng-Latn)": 63.2, - "Tatoeba (swe-Latn_eng-Latn)": 87.46, - "Tatoeba (tat-Cyrl_eng-Latn)": 66.8, - "Tatoeba (srp-Cyrl_eng-Latn)": 83.06, - "Tatoeba (cmn-Hans_eng-Latn)": 89.85, - "Tatoeba (ces-Latn_eng-Latn)": 80.99, - "Tatoeba (bel-Cyrl_eng-Latn)": 80.89, - "Tatoeba (yue-Hant_eng-Latn)": 69.33, - "Tatoeba (lit-Latn_eng-Latn)": 59.95, - "Tatoeba (tel-Telu_eng-Latn)": 86.82, - "Tatoeba (nob-Latn_eng-Latn)": 90.18, - "Tatoeba (mar-Deva_eng-Latn)": 85.94, - "Tatoeba (ara-Arab_eng-Latn)": 76.09, - "Tatoeba (swg-Latn_eng-Latn)": 44.0, - "Tatoeba (bos-Latn_eng-Latn)": 81.15, - "Tatoeba (pam-Latn_eng-Latn)": 5.76, - "Tatoeba (fry-Latn_eng-Latn)": 49.05, - "Tatoeba (hun-Latn_eng-Latn)": 74.44, - "Tatoeba (ron-Latn_eng-Latn)": 85.68, - "Tatoeba (afr-Latn_eng-Latn)": 85.17, - "Tatoeba (isl-Latn_eng-Latn)": 62.32, - "Tatoeba (aze-Latn_eng-Latn)": 80.79, - "Tatoeba (hsb-Latn_eng-Latn)": 36.49, - "Tatoeba (tam-Taml_eng-Latn)": 82.82, - "Tatoeba (ceb-Latn_eng-Latn)": 42.35, - "Tatoeba (jav-Latn_eng-Latn)": 53.39, - "Tatoeba (glg-Latn_eng-Latn)": 79.65, - "Tatoeba (por-Latn_eng-Latn)": 89.63, - "Tatoeba (awa-Deva_eng-Latn)": 74.55, - "Tatoeba (hin-Deva_eng-Latn)": 92.36, - "Tatoeba (ita-Latn_eng-Latn)": 88.54, - "Tatoeba (deu-Latn_eng-Latn)": 97.22, - "Tatoeba (gle-Latn_eng-Latn)": 56.32, - "Tatoeba (kat-Geor_eng-Latn)": 77.6, - "Tatoeba (ile-Latn_eng-Latn)": 70.31 + "Model": "nomic-embed-text-v1.5-64" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "nomic-embed-text-v1.5-64", + "AmazonCounterfactualClassification (en)": 66.85, + "AmazonPolarityClassification": 85.92, + "AmazonReviewsClassification (en)": 41.02, + "Banking77Classification": 80.63, + "EmotionClassification": 40.55, + "ImdbClassification": 76.6, + "MTOPDomainClassification (en)": 86.31, + "MTOPIntentClassification (en)": 62.77, + "MassiveIntentClassification (en)": 64.95, + "MassiveScenarioClassification (en)": 70.38, + "ToxicConversationsClassification": 66.53, + "TweetSentimentExtractionClassification": 55.23 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "nomic-embed-text-v1.5-64", + "ArxivClusteringP2P": 41.8, + "ArxivClusteringS2S": 32.41, + "BiorxivClusteringP2P": 34.81, + "BiorxivClusteringS2S": 28.59, + "MedrxivClusteringP2P": 32.73, + "MedrxivClusteringS2S": 29.91, + "RedditClustering": 50.31, + "RedditClusteringP2P": 56.57, + "StackExchangeClustering": 57.99, + "StackExchangeClusteringP2P": 33.64, + "TwentyNewsgroupsClustering": 44.61 } ] }, - "Classification": { - "accuracy": [ + "PairClassification": { + "ap": [ { - "Model": "multilingual-e5-small", - "AllegroReviews (pol-Latn)": 37.33, - "AllegroReviews": 37.42, - "AmazonCounterfactualClassification (en-ext)": 73.07, - "AmazonCounterfactualClassification (en)": 71.87, - "AmazonCounterfactualClassification (deu-Latn)": 71.72, - "AmazonCounterfactualClassification (jpn-Jpan)": 61.46, - "AmazonPolarityClassification": 88.61, - "AmazonReviewsClassification (en)": 45.75, - "AmazonReviewsClassification (deu-Latn)": 41.07, - "AmazonReviewsClassification (spa-Latn)": 41.37, - "AmazonReviewsClassification (fra-Latn)": 39.47, - "AmazonReviewsClassification (jpn-Jpan)": 38.55, - "AmazonReviewsClassification (cmn-Hans)": 38.31, - "AmazonReviewsClassification (fr)": 39.68, - "AngryTweetsClassification (dan-Latn)": 56.27, - "AngryTweetsClassification": 53.57, - "Banking77Classification": 
70.44, - "CBD (pol-Latn)": 63.33, - "CBD": 63.25, - "DKHateClassification": 60.73, - "DanishPoliticalCommentsClassification (dan-Latn)": 34.82, - "DanishPoliticalCommentsClassification": 34.38, - "EmotionClassification": 42.86, - "GeoreviewClassification (rus-Cyrl)": 44.66, - "HeadlineClassification (rus-Cyrl)": 73.94, - "IFlyTek (cmn-Hans)": 40.74, - "IFlyTek": 47.35, - "ImdbClassification": 79.57, - "InappropriatenessClassification (rus-Cyrl)": 59.16, - "JDReview (cmn-Hans)": 78.37, - "JDReview": 79.34, - "KinopoiskClassification (rus-Cyrl)": 49.96, - "LccSentimentClassification (dan-Latn)": 58.6, - "LccSentimentClassification": 57.87, - "MTOPDomainClassification (en)": 88.99, - "MTOPDomainClassification (deu-Latn)": 86.15, - "MTOPDomainClassification (spa-Latn)": 85.53, - "MTOPDomainClassification (fra-Latn)": 81.5, - "MTOPDomainClassification (hin-Deva)": 84.07, - "MTOPDomainClassification (tha-Thai)": 83.16, - "MTOPDomainClassification (fr)": 81.2, - "MTOPIntentClassification (en)": 56.69, - "MTOPIntentClassification (deu-Latn)": 55.88, - "MTOPIntentClassification (spa-Latn)": 53.15, - "MTOPIntentClassification (fra-Latn)": 44.35, - "MTOPIntentClassification (hin-Deva)": 52.26, - "MTOPIntentClassification (tha-Thai)": 54.61, - "MTOPIntentClassification (fr)": 46.01, - "MasakhaNEWSClassification (amh-Ethi)": 84.28, - "MasakhaNEWSClassification (eng)": 75.61, - "MasakhaNEWSClassification (fra-Latn)": 74.67, - "MasakhaNEWSClassification (hau-Latn)": 73.08, - "MasakhaNEWSClassification (ibo-Latn)": 63.9, - "MasakhaNEWSClassification (lin-Latn)": 73.37, - "MasakhaNEWSClassification (lug-Latn)": 67.89, - "MasakhaNEWSClassification (orm-Ethi)": 68.77, - "MasakhaNEWSClassification (pcm-Latn)": 90.79, - "MasakhaNEWSClassification (run-Latn)": 75.4, - "MasakhaNEWSClassification (sna-Latn)": 82.76, - "MasakhaNEWSClassification (som-Latn)": 59.8, - "MasakhaNEWSClassification (swa-Latn)": 69.85, - "MasakhaNEWSClassification (tir-Ethi)": 68.01, - "MasakhaNEWSClassification (xho-Latn)": 72.22, - "MasakhaNEWSClassification (yor-Latn)": 73.84, - "MasakhaNEWSClassification (fra)": 77.65, - "MassiveIntentClassification (ben-Beng)": 50.68, - "MassiveIntentClassification (tur-Latn)": 56.88, - "MassiveIntentClassification (ind-Latn)": 56.2, - "MassiveIntentClassification (khm-Khmr)": 33.45, - "MassiveIntentClassification (en)": 63.87, - "MassiveIntentClassification (mal-Mlym)": 52.81, - "MassiveIntentClassification (pol-Latn)": 57.33, - "MassiveIntentClassification (lav-Latn)": 44.93, - "MassiveIntentClassification (isl-Latn)": 41.53, - "MassiveIntentClassification (sqi-Latn)": 48.68, - "MassiveIntentClassification (amh-Ethi)": 43.52, - "MassiveIntentClassification (cmo-Hans)": 62.04, - "MassiveIntentClassification (nld-Latn)": 59.27, - "MassiveIntentClassification (deu-Latn)": 55.52, - "MassiveIntentClassification (nob-Latn)": 55.36, - "MassiveIntentClassification (cmo-Hant)": 53.75, - "MassiveIntentClassification (urd-Arab)": 50.51, - "MassiveIntentClassification (slv-Latn)": 47.71, - "MassiveIntentClassification (hun-Latn)": 53.21, - "MassiveIntentClassification (jpn-Jpan)": 61.58, - "MassiveIntentClassification (swa-Latn)": 44.84, - "MassiveIntentClassification (fra-Latn)": 57.9, - "MassiveIntentClassification (spa-Latn)": 59.19, - "MassiveIntentClassification (mon-Cyrl)": 47.38, - "MassiveIntentClassification (dan-Latn)": 56.12, - "MassiveIntentClassification (msa-Latn)": 50.8, - "MassiveIntentClassification (aze-Latn)": 49.32, - "MassiveIntentClassification (fas-Arab)": 57.73, - 
"MassiveIntentClassification (kan-Knda)": 47.85, - "MassiveIntentClassification (kor-Kore)": 57.12, - "MassiveIntentClassification (tha-Thai)": 56.26, - "MassiveIntentClassification (heb-Hebr)": 51.11, - "MassiveIntentClassification (hin-Deva)": 55.69, - "MassiveIntentClassification (ara-Arab)": 47.78, - "MassiveIntentClassification (por-Latn)": 60.12, - "MassiveIntentClassification (vie-Latn)": 56.19, - "MassiveIntentClassification (hye-Armn)": 47.89, - "MassiveIntentClassification (ita-Latn)": 58.8, - "MassiveIntentClassification (ell-Grek)": 54.14, - "MassiveIntentClassification (cym-Latn)": 36.62, - "MassiveIntentClassification (tel-Telu)": 48.85, - "MassiveIntentClassification (kat-Geor)": 39.52, - "MassiveIntentClassification (swe-Latn)": 58.2, - "MassiveIntentClassification (tam-Taml)": 47.65, - "MassiveIntentClassification (fin-Latn)": 55.14, - "MassiveIntentClassification (tgl-Latn)": 48.7, - "MassiveIntentClassification (ron-Latn)": 52.82, - "MassiveIntentClassification (jav-Latn)": 42.96, - "MassiveIntentClassification (rus-Cyrl)": 58.43, - "MassiveIntentClassification (afr-Latn)": 48.74, - "MassiveIntentClassification (mya-Mymr)": 45.64, - "MassiveIntentClassification (da)": 54.63, - "MassiveIntentClassification (nb)": 53.96, - "MassiveIntentClassification (sv)": 56.6, - "MassiveIntentClassification (pl)": 57.4, - "MassiveScenarioClassification (nld-Latn)": 67.01, - "MassiveScenarioClassification (tur-Latn)": 62.14, - "MassiveScenarioClassification (cym-Latn)": 44.63, - "MassiveScenarioClassification (jav-Latn)": 51.39, - "MassiveScenarioClassification (hin-Deva)": 62.22, - "MassiveScenarioClassification (fra-Latn)": 63.9, - "MassiveScenarioClassification (cmo-Hans)": 68.96, - "MassiveScenarioClassification (kan-Knda)": 52.73, - "MassiveScenarioClassification (isl-Latn)": 49.66, - "MassiveScenarioClassification (jpn-Jpan)": 67.75, - "MassiveScenarioClassification (mal-Mlym)": 60.31, - "MassiveScenarioClassification (pol-Latn)": 64.27, - "MassiveScenarioClassification (mya-Mymr)": 51.07, - "MassiveScenarioClassification (slv-Latn)": 54.05, - "MassiveScenarioClassification (rus-Cyrl)": 63.89, - "MassiveScenarioClassification (urd-Arab)": 55.91, - "MassiveScenarioClassification (fas-Arab)": 63.32, - "MassiveScenarioClassification (fin-Latn)": 61.89, - "MassiveScenarioClassification (kat-Geor)": 44.96, - "MassiveScenarioClassification (sqi-Latn)": 56.15, - "MassiveScenarioClassification (en)": 69.28, - "MassiveScenarioClassification (hun-Latn)": 61.93, - "MassiveScenarioClassification (aze-Latn)": 53.27, - "MassiveScenarioClassification (heb-Hebr)": 59.22, - "MassiveScenarioClassification (kor-Kore)": 65.7, - "MassiveScenarioClassification (nob-Latn)": 61.96, - "MassiveScenarioClassification (dan-Latn)": 64.03, - "MassiveScenarioClassification (cmo-Hant)": 61.15, - "MassiveScenarioClassification (ron-Latn)": 60.0, - "MassiveScenarioClassification (amh-Ethi)": 50.53, - "MassiveScenarioClassification (spa-Latn)": 64.43, - "MassiveScenarioClassification (afr-Latn)": 58.0, - "MassiveScenarioClassification (lav-Latn)": 51.0, - "MassiveScenarioClassification (deu-Latn)": 65.88, - "MassiveScenarioClassification (ita-Latn)": 64.03, - "MassiveScenarioClassification (tha-Thai)": 65.72, - "MassiveScenarioClassification (msa-Latn)": 59.18, - "MassiveScenarioClassification (tam-Taml)": 52.74, - "MassiveScenarioClassification (ara-Arab)": 54.56, - "MassiveScenarioClassification (tgl-Latn)": 55.3, - "MassiveScenarioClassification (por-Latn)": 62.75, - "MassiveScenarioClassification (swe-Latn)": 
67.33, - "MassiveScenarioClassification (tel-Telu)": 54.86, - "MassiveScenarioClassification (khm-Khmr)": 39.01, - "MassiveScenarioClassification (swa-Latn)": 52.42, - "MassiveScenarioClassification (vie-Latn)": 62.67, - "MassiveScenarioClassification (ind-Latn)": 62.0, - "MassiveScenarioClassification (hye-Armn)": 52.93, - "MassiveScenarioClassification (ben-Beng)": 57.38, - "MassiveScenarioClassification (mon-Cyrl)": 52.41, - "MassiveScenarioClassification (ell-Grek)": 62.29, - "MassiveScenarioClassification (da)": 62.34, - "MassiveScenarioClassification (nb)": 59.9, - "MassiveScenarioClassification (sv)": 65.54, - "MassiveScenarioClassification (pl)": 64.25, - "MultilingualSentiment (cmn-Hans)": 66.0, - "MultilingualSentiment": 64.74, - "NoRecClassification (nob-Latn)": 50.08, - "NoRecClassification": 53.96, - "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 72.15, - "NordicLangClassification": 75.15, - "NorwegianParliament": 60.15, - "OnlineShopping (cmn-Hans)": 88.7, - "OnlineShopping": 88.73, - "PAC (pol-Latn)": 70.48, - "PAC": 70.55, - "PolEmo2.0-IN (pol-Latn)": 67.31, - "PolEmo2.0-IN": 67.35, - "PolEmo2.0-OUT (pol-Latn)": 39.17, - "PolEmo2.0-OUT": 39.13, - "RuReviewsClassification (rus-Cyrl)": 61.18, - "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.99, - "RuSciBenchOECDClassification (rus-Cyrl)": 41.72, - "ScalaDaClassification": 50.3, - "ScalaNbClassification": 50.06, - "TNews (cmn-Hans)": 46.6, - "TNews": 48.38, - "ToxicConversationsClassification": 63.59, - "TweetSentimentExtractionClassification": 62.79, - "Waimai (cmn-Hans)": 84.15, - "Waimai": 83.9 + "Model": "nomic-embed-text-v1.5-64", + "SprintDuplicateQuestions": 90.06, + "TwitterSemEval2015": 71.68, + "TwitterURLCorpus": 85.03 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "nomic-embed-text-v1.5-64", + "AskUbuntuDupQuestions": 60.79, + "MindSmallReranking": 29.7, + "SciDocsRR": 75.79, + "StackOverflowDupQuestions": 47.42 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "nomic-embed-text-v1.5-64", + "ArguAna": 37.16, + "CQADupstackRetrieval": 28.72, + "ClimateFEVER": 31.48, + "DBPedia": 28.19, + "FEVER": 70.24, + "FiQA2018": 25.78, + "HotpotQA": 43.07, + "MSMARCO": 35.95, + "NFCorpus": 26.03, + "NQ": 45.54, + "QuoraRetrieval": 85.83, + "SCIDOCS": 12.09, + "SciFact": 52.71, + "TRECCOVID": 67.83, + "Touche2020": 23.13 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "nomic-embed-text-v1.5-64", + "BIOSSES": 77.18, + "SICK-R": 78.76, + "STS12": 77.3, + "STS13": 84.18, + "STS14": 79.37, + "STS15": 84.69, + "STS16": 83.36, + "STS17 (en-en)": 85.73, + "STS22 (en)": 63.83, + "STSBenchmark": 83.46 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "nomic-embed-text-v1.5-64", + "SummEval": 28.41 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "nomic-embed-text-v1.5-64" + } + ] + } + }, + "text-embedding-3-large": { + "BitextMining": { + "f1": [ + { + "Model": "text-embedding-3-large" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "text-embedding-3-large", + "AmazonCounterfactualClassification (en)": 78.93, + "AmazonPolarityClassification": 92.85, + "AmazonReviewsClassification (en)": 48.7, + "Banking77Classification": 85.69, + "EmotionClassification": 51.58, + "ImdbClassification": 87.67, + "MTOPDomainClassification (en)": 95.36, + "MTOPIntentClassification (en)": 75.07, + "MassiveIntentClassification (en)": 74.64, + "MassiveScenarioClassification (en)": 79.79, + "ToxicConversationsClassification": 72.92, + 
"TweetSentimentExtractionClassification": 62.22 } ] }, "Clustering": { "v_measure": [ { - "Model": "multilingual-e5-small", - "8TagsClustering": 23.92, - "AlloProfClusteringP2P": 60.89, - "AlloProfClusteringS2S": 32.52, - "BiorxivClusteringP2P": 35.84, - "BiorxivClusteringS2S": 27.35, - "CLSClusteringP2P": 39.14, - "CLSClusteringS2S": 37.79, - "GeoreviewClusteringP2P (rus-Cyrl)": 58.57, - "HALClusteringS2S": 18.95, - "MLSUMClusteringP2P (rus-Cyrl)": 39.69, - "MLSUMClusteringP2P": 43.2, - "MLSUMClusteringS2S (rus-Cyrl)": 39.9, - "MLSUMClusteringS2S": 37.61, - "MasakhaNEWSClusteringP2P (amh-Ethi)": 66.2, - "MasakhaNEWSClusteringP2P (eng)": 50.08, - "MasakhaNEWSClusteringP2P (fra-Latn)": 56.32, - "MasakhaNEWSClusteringP2P (hau-Latn)": 53.63, - "MasakhaNEWSClusteringP2P (ibo-Latn)": 49.19, - "MasakhaNEWSClusteringP2P (lin-Latn)": 55.06, - "MasakhaNEWSClusteringP2P (lug-Latn)": 59.97, - "MasakhaNEWSClusteringP2P (orm-Ethi)": 32.72, - "MasakhaNEWSClusteringP2P (pcm-Latn)": 62.22, - "MasakhaNEWSClusteringP2P (run-Latn)": 57.52, - "MasakhaNEWSClusteringP2P (sna-Latn)": 45.11, - "MasakhaNEWSClusteringP2P (som-Latn)": 42.39, - "MasakhaNEWSClusteringP2P (swa-Latn)": 23.77, - "MasakhaNEWSClusteringP2P (tir-Ethi)": 57.68, - "MasakhaNEWSClusteringP2P (xho-Latn)": 39.96, - "MasakhaNEWSClusteringP2P (yor-Latn)": 26.56, - "MasakhaNEWSClusteringP2P (fra)": 40.12, - "MasakhaNEWSClusteringS2S (amh-Ethi)": 55.48, - "MasakhaNEWSClusteringS2S (eng)": 37.79, - "MasakhaNEWSClusteringS2S (fra-Latn)": 35.8, - "MasakhaNEWSClusteringS2S (hau-Latn)": 20.22, - "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.67, - "MasakhaNEWSClusteringS2S (lin-Latn)": 41.12, - "MasakhaNEWSClusteringS2S (lug-Latn)": 48.63, - "MasakhaNEWSClusteringS2S (orm-Ethi)": 29.16, - "MasakhaNEWSClusteringS2S (pcm-Latn)": 65.36, - "MasakhaNEWSClusteringS2S (run-Latn)": 45.5, - "MasakhaNEWSClusteringS2S (sna-Latn)": 47.61, - "MasakhaNEWSClusteringS2S (som-Latn)": 28.59, - "MasakhaNEWSClusteringS2S (swa-Latn)": 13.91, - "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51, - "MasakhaNEWSClusteringS2S (xho-Latn)": 37.26, - "MasakhaNEWSClusteringS2S (yor-Latn)": 23.38, - "MasakhaNEWSClusteringS2S (fra)": 39.22, - "MedrxivClusteringP2P": 30.72, - "MedrxivClusteringS2S": 27.0, - "RedditClustering": 40.12, - "RedditClusteringP2P": 59.49, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.1, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.29, - "StackExchangeClustering": 53.32, - "StackExchangeClusteringP2P": 31.87, - "ThuNewsClusteringP2P": 55.18, - "ThuNewsClusteringS2S": 48.93, - "TwentyNewsgroupsClustering": 33.67 + "Model": "text-embedding-3-large", + "ArxivClusteringP2P": 49.01, + "ArxivClusteringS2S": 44.45, + "BiorxivClusteringP2P": 38.03, + "BiorxivClusteringS2S": 36.53, + "MedrxivClusteringP2P": 32.7, + "MedrxivClusteringS2S": 31.27, + "RedditClustering": 67.84, + "RedditClusteringP2P": 67.96, + "StackExchangeClustering": 76.26, + "StackExchangeClusteringP2P": 36.88, + "TwentyNewsgroupsClustering": 58.14 } ] }, "PairClassification": { "ap": [ { - "Model": "multilingual-e5-small", - "CDSC-E (pol-Latn)": 69.69, - "CDSC-E": 69.7, - "Cmnli": 72.12, - "Ocnli": 60.77, - "OpusparcusPC (deu-Latn)": 94.9, - "OpusparcusPC (en)": 98.42, - "OpusparcusPC (fin-Latn)": 88.29, - "OpusparcusPC (fra-Latn)": 91.77, - "OpusparcusPC (rus-Cyrl)": 84.79, - "OpusparcusPC (swe-Latn)": 91.07, - "OpusparcusPC (fr)": 92.52, - "PPC": 86.72, - "PSC (pol-Latn)": 99.23, - "PSC": 99.24, - "PawsXPairClassification (deu-Latn)": 52.13, - "PawsXPairClassification (en)": 53.91, - 
"PawsXPairClassification (spa-Latn)": 51.39, - "PawsXPairClassification (fra-Latn)": 52.69, - "PawsXPairClassification (jpn-Hira)": 48.24, - "PawsXPairClassification (kor-Hang)": 49.95, - "PawsXPairClassification (cmn-Hans)": 54.01, - "PawsXPairClassification (fr)": 55.68, - "SICK-E-PL (pol-Latn)": 66.35, - "SICK-E-PL": 66.34, - "SprintDuplicateQuestions": 92.18, - "TERRa (rus-Cyrl)": 55.14, - "TwitterSemEval2015": 70.75, - "TwitterURLCorpus": 85.03 + "Model": "text-embedding-3-large", + "SprintDuplicateQuestions": 92.25, + "TwitterSemEval2015": 77.13, + "TwitterURLCorpus": 87.78 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "text-embedding-3-large", + "AskUbuntuDupQuestions": 65.03, + "MindSmallReranking": 29.86, + "SciDocsRR": 86.66, + "StackOverflowDupQuestions": 55.08 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "text-embedding-3-large", + "AILACasedocs": 39.0, + "AILAStatutes": 41.31, + "ARCChallenge": 23.98, + "AlphaNLI": 37.27, + "ArguAna": 58.05, + "BrightRetrieval (theoremqa_questions)": 22.22, + "BrightRetrieval (leetcode)": 23.65, + "BrightRetrieval (earth_science)": 26.27, + "BrightRetrieval (psychology)": 27.52, + "BrightRetrieval (robotics)": 12.93, + "BrightRetrieval (economics)": 19.98, + "BrightRetrieval (stackoverflow)": 12.49, + "BrightRetrieval (biology)": 23.67, + "BrightRetrieval (theoremqa_theorems)": 9.25, + "BrightRetrieval (pony)": 2.45, + "BrightRetrieval (sustainable_living)": 20.32, + "BrightRetrieval (aops)": 8.45, + "CQADupstackRetrieval": 47.54, + "ClimateFEVER": 30.27, + "DBPedia": 44.76, + "FEVER": 87.94, + "FiQA2018": 55.0, + "GerDaLIRSmall": 32.77, + "HellaSwag": 34.12, + "HotpotQA": 71.58, + "LEMBNarrativeQARetrieval": 44.09, + "LEMBNeedleRetrieval": 29.25, + "LEMBPasskeyRetrieval": 63.0, + "LEMBQMSumRetrieval": 32.49, + "LEMBSummScreenFDRetrieval": 84.8, + "LEMBWikimQARetrieval": 54.16, + "LeCaRDv2": 57.2, + "LegalBenchConsumerContractsQA": 79.39, + "LegalBenchCorporateLobbying": 95.09, + "LegalQuAD": 57.47, + "LegalSummarization": 71.55, + "MSMARCO": 40.24, + "NFCorpus": 42.07, + "NQ": 61.27, + "PIQA": 41.96, + "Quail": 10.15, + "QuoraRetrieval": 89.05, + "RARbCode": 89.64, + "RARbMath": 90.08, + "SCIDOCS": 23.11, + "SIQA": 3.44, + "SciFact": 77.77, + "SpartQA": 7.51, + "TRECCOVID": 79.56, + "TempReasonL1": 2.13, + "TempReasonL2Fact": 28.65, + "TempReasonL2Pure": 10.34, + "TempReasonL3Fact": 25.52, + "TempReasonL3Pure": 15.28, + "Touche2020": 23.35, + "WinoGrande": 29.11 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "text-embedding-3-large", + "BIOSSES": 84.68, + "SICK-R": 79.0, + "STS12": 72.84, + "STS13": 86.1, + "STS14": 81.15, + "STS15": 88.49, + "STS16": 85.08, + "STS17 (en-en)": 90.22, + "STS22 (en)": 66.14, + "STSBenchmark": 83.56 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "text-embedding-3-large", + "SummEval": 29.92 + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "text-embedding-3-large", + "Core17InstructionRetrieval": -0.2, + "News21InstructionRetrieval": -2.03, + "Robust04InstructionRetrieval": -5.81 + } + ] + } + }, + "DanskBERT": { + "BitextMining": { + "f1": [ + { + "Model": "DanskBERT", + "BornholmBitextMining": 6.34 } ] }, - "Reranking": { - "map": [ + "Classification": { + "accuracy": [ { - "Model": "multilingual-e5-small", - "AlloprofReranking (fra-Latn)": 64.41, - "AlloprofReranking": 56.17, - "AskUbuntuDupQuestions": 56.42, - "CMedQAv1": 63.44, - "CMedQAv2": 62.41, - "MMarcoReranking (cmn-Hans)": 29.98, - "MMarcoReranking": 24.33, - 
"MindSmallReranking": 29.96, - "RuBQReranking (rus-Cyrl)": 71.46, - "SciDocsRR": 78.26, - "StackOverflowDupQuestions": 46.97, - "SyntecReranking (fra-Latn)": 81.22, - "SyntecReranking": 86.7, - "T2Reranking (cmn-Hans)": 65.72, - "T2Reranking": 65.24 + "Model": "DanskBERT", + "AngryTweetsClassification": 54.28, + "DKHateClassification": 59.3, + "DanishPoliticalCommentsClassification": 39.81, + "LccSentimentClassification": 58.0, + "MassiveIntentClassification (da)": 54.68, + "MassiveIntentClassification (nb)": 45.38, + "MassiveIntentClassification (sv)": 40.82, + "MassiveScenarioClassification (da)": 59.56, + "MassiveScenarioClassification (nb)": 47.55, + "MassiveScenarioClassification (sv)": 40.14, + "NoRecClassification": 46.06, + "NordicLangClassification": 74.25, + "NorwegianParliament": 56.79, + "ScalaDaClassification": 66.59, + "ScalaNbClassification": 59.99 } ] }, - "Retrieval": { - "ndcg_at_10": [ + "Clustering": { + "v_measure": [ { - "Model": "multilingual-e5-small", - "AILACasedocs": 23.43, - "AILAStatutes": 19.01, - "ARCChallenge": 7.14, - "AlloprofRetrieval (fra-Latn)": 27.38, - "AlloprofRetrieval": 27.01, - "AlphaNLI": 13.0, - "ArguAna": 39.09, - "ArguAna-PL (pol-Latn)": 37.49, - "ArguAna-PL": 37.43, - "BSARDRetrieval (fra-Latn)": 14.54, - "BSARDRetrieval": 0.0, - "CmedqaRetrieval (cmn-Hans)": 24.36, - "CmedqaRetrieval": 24.38, - "CovidRetrieval (cmn-Hans)": 72.82, - "CovidRetrieval": 72.82, - "DBPedia-PL": 29.27, - "DuRetrieval (cmn-Hans)": 81.36, - "DuRetrieval": 81.35, - "EcomRetrieval (cmn-Hans)": 53.53, - "EcomRetrieval": 53.56, - "FiQA-PL (pol-Latn)": 22.02, - "FiQA-PL": 22.03, - "FiQA2018": 33.13, - "GerDaLIRSmall (deu-Latn)": 14.81, - "HellaSwag": 23.73, - "HotpotQA-PL": 60.15, - "LEMBNarrativeQARetrieval": 22.6, - "LEMBNeedleRetrieval": 30.75, - "LEMBPasskeyRetrieval": 38.25, - "LEMBQMSumRetrieval": 21.51, - "LEMBSummScreenFDRetrieval": 62.75, - "LEMBWikimQARetrieval": 57.13, - "LeCaRDv2 (zho-Hans)": 61.58, - "LegalBenchConsumerContractsQA": 66.98, - "LegalBenchCorporateLobbying": 89.47, - "LegalQuAD (deu-Latn)": 47.8, - "LegalSummarization": 55.76, - "MMarcoRetrieval (cmn-Hans)": 73.17, - "MMarcoRetrieval": 73.17, - "MSMARCO-PL": 26.94, - "MedicalRetrieval (cmn-Hans)": 44.84, - "MedicalRetrieval": 44.84, - "MintakaRetrieval (ara-Arab)": 21.22, - "MintakaRetrieval (deu-Latn)": 25.6, - "MintakaRetrieval (spa-Latn)": 26.4, - "MintakaRetrieval (fra-Latn)": 25.0, - "MintakaRetrieval (hin-Deva)": 21.1, - "MintakaRetrieval (ita-Latn)": 26.25, - "MintakaRetrieval (jpn-Hira)": 20.69, - "MintakaRetrieval (por-Latn)": 24.44, - "MintakaRetrieval (fr)": 22.53, - "NFCorpus": 31.0, - "NFCorpus-PL (pol-Latn)": 26.5, - "NFCorpus-PL": 26.48, - "NQ-PL": 40.46, - "PIQA": 21.08, - "Quail": 2.38, - "Quora-PL": 78.7, - "RARbCode": 46.96, - "RARbMath": 63.91, - "RiaNewsRetrieval (rus-Cyrl)": 70.01, - "RuBQRetrieval (rus-Cyrl)": 68.53, - "SCIDOCS": 13.9, - "SCIDOCS-PL (pol-Latn)": 11.59, - "SCIDOCS-PL": 11.6, - "SIQA": 2.57, - "SciFact": 67.7, - "SciFact-PL (pol-Latn)": 62.76, - "SciFact-PL": 62.76, - "SpartQA": 5.43, - "SyntecRetrieval (fra-Latn)": 73.46, - "SyntecRetrieval": 75.76, - "T2Retrieval (cmn-Hans)": 71.36, - "T2Retrieval": 71.39, - "TRECCOVID": 72.57, - "TRECCOVID-PL (pol-Latn)": 70.92, - "TRECCOVID-PL": 70.92, - "TempReasonL1": 0.8, - "TempReasonL2Fact": 36.76, - "TempReasonL2Pure": 0.62, - "TempReasonL3Fact": 32.42, - "TempReasonL3Pure": 6.36, - "Touche2020": 21.16, - "VideoRetrieval (cmn-Hans)": 58.06, - "VideoRetrieval": 58.09, - "WinoGrande": 37.46, - "XPQARetrieval 
(ara-Arab_ara-Arab)": 39.93, - "XPQARetrieval (eng-Latn_ara-Arab)": 18.09, - "XPQARetrieval (ara-Arab_eng-Latn)": 31.64, - "XPQARetrieval (deu-Latn_deu-Latn)": 69.43, - "XPQARetrieval (eng-Latn_deu-Latn)": 25.14, - "XPQARetrieval (deu-Latn_eng-Latn)": 52.36, - "XPQARetrieval (spa-Latn_spa-Latn)": 55.71, - "XPQARetrieval (eng-Latn_spa-Latn)": 22.5, - "XPQARetrieval (spa-Latn_eng-Latn)": 42.4, - "XPQARetrieval (fra-Latn_fra-Latn)": 57.17, - "XPQARetrieval (eng-Latn_fra-Latn)": 27.69, - "XPQARetrieval (fra-Latn_eng-Latn)": 47.46, - "XPQARetrieval (hin-Deva_hin-Deva)": 68.15, - "XPQARetrieval (eng-Latn_hin-Deva)": 25.82, - "XPQARetrieval (hin-Deva_eng-Latn)": 63.79, - "XPQARetrieval (ita-Latn_ita-Latn)": 67.71, - "XPQARetrieval (eng-Latn_ita-Latn)": 22.97, - "XPQARetrieval (ita-Latn_eng-Latn)": 46.61, - "XPQARetrieval (jpn-Hira_jpn-Hira)": 69.49, - "XPQARetrieval (eng-Latn_jpn-Hira)": 25.08, - "XPQARetrieval (jpn-Hira_eng-Latn)": 54.6, - "XPQARetrieval (kor-Hang_kor-Hang)": 33.0, - "XPQARetrieval (eng-Latn_kor-Hang)": 22.49, - "XPQARetrieval (kor-Hang_eng-Latn)": 23.02, - "XPQARetrieval (pol-Latn_pol-Latn)": 43.37, - "XPQARetrieval (eng-Latn_pol-Latn)": 19.89, - "XPQARetrieval (pol-Latn_eng-Latn)": 28.72, - "XPQARetrieval (por-Latn_por-Latn)": 41.8, - "XPQARetrieval (eng-Latn_por-Latn)": 15.79, - "XPQARetrieval (por-Latn_eng-Latn)": 33.74, - "XPQARetrieval (tam-Taml_tam-Taml)": 31.65, - "XPQARetrieval (eng-Latn_tam-Taml)": 13.18, - "XPQARetrieval (tam-Taml_eng-Latn)": 26.44, - "XPQARetrieval (cmn-Hans_cmn-Hans)": 63.98, - "XPQARetrieval (eng-Latn_cmn-Hans)": 16.52, - "XPQARetrieval (cmn-Hans_eng-Latn)": 45.32, - "XPQARetrieval (fr)": 57.47 + "Model": "DanskBERT" } ] }, - "STS": { - "spearman": [ + "PairClassification": { + "ap": [ { - "Model": "multilingual-e5-small", - "AFQMC (cmn-Hans)": 25.21, - "AFQMC": 25.21, - "ATEC (cmn-Hans)": 35.14, - "ATEC": 35.14, - "BIOSSES": 82.46, - "BQ (cmn-Hans)": 43.27, - "BQ": 43.27, - "CDSC-R (pol-Latn)": 90.27, - "CDSC-R": 90.27, - "LCQMC (cmn-Hans)": 72.7, - "LCQMC": 72.7, - "PAWSX (cmn-Hans)": 11.0, - "PAWSX": 11.01, - "QBQTC": 30.25, - "RUParaPhraserSTS (rus-Cyrl)": 70.46, - "RuSTSBenchmarkSTS (rus-Cyrl)": 78.08, - "SICK-R": 77.51, - "SICK-R-PL (pol-Latn)": 69.45, - "SICK-R-PL": 69.46, - "SICKFr (fra-Latn)": 74.67, - "SICKFr": 75.62, - "STS12": 76.56, - "STS13": 76.97, - "STS14": 75.52, - "STS15": 87.12, - "STS16": 83.63, - "STS17 (ita-Latn_eng-Latn)": 77.31, - "STS17 (en-en)": 86.42, - "STS17 (eng-Latn_ara-Arab)": 57.39, - "STS17 (eng-Latn_tur-Latn)": 55.93, - "STS17 (spa-Latn_eng-Latn)": 72.43, - "STS17 (kor-Hang)": 78.87, - "STS17 (spa-Latn)": 84.83, - "STS17 (eng-Latn_deu-Latn)": 76.82, - "STS17 (fra-Latn_eng-Latn)": 72.28, - "STS17 (nld-Latn_eng-Latn)": 75.43, - "STS17 (ara-Arab)": 73.0, - "STS22 (ita-Latn)": 76.53, - "STS22 (en)": 61.25, - "STS22 (pol-Latn_eng-Latn)": 72.69, - "STS22 (cmn-Hans)": 66.85, - "STS22 (fra-Latn)": 76.58, - "STS22 (deu-Latn)": 53.45, - "STS22 (fra-Latn_pol-Latn)": 84.52, - "STS22 (deu-Latn_pol-Latn)": 28.24, - "STS22 (spa-Latn_eng-Latn)": 74.2, - "STS22 (spa-Latn)": 66.86, - "STS22 (rus-Cyrl)": 59.9, - "STS22 (spa-Latn_ita-Latn)": 71.74, - "STS22 (pol-Latn)": 35.78, - "STS22 (tur-Latn)": 63.69, - "STS22 (ara-Arab)": 56.65, - "STS22 (cmn-Hans_eng-Latn)": 65.32, - "STS22 (deu-Latn_eng-Latn)": 56.07, - "STS22 (deu-Latn_fra-Latn)": 60.62, - "STS22 (pl)": 35.8, - "STSB (cmn-Hans)": 77.73, - "STSB": 77.73, - "STSBenchmark": 84.11, - "STSBenchmarkMultilingualSTS (en)": 84.11, - "STSBenchmarkMultilingualSTS (cmn-Hans)": 78.49, - 
"STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.24, - "STSBenchmarkMultilingualSTS (spa-Latn)": 80.31, - "STSBenchmarkMultilingualSTS (deu-Latn)": 79.17, - "STSBenchmarkMultilingualSTS (fra-Latn)": 79.2, - "STSBenchmarkMultilingualSTS (nld-Latn)": 76.04, - "STSBenchmarkMultilingualSTS (pol-Latn)": 72.61, - "STSBenchmarkMultilingualSTS (por-Latn)": 77.39, - "STSBenchmarkMultilingualSTS (ita-Latn)": 78.21, - "STSBenchmarkMultilingualSTS (fr)": 79.32 + "Model": "DanskBERT" + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "DanskBERT" + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "DanskBERT" + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "DanskBERT" } ] }, "Summarization": { "spearman": [ { - "Model": "multilingual-e5-small", - "SummEval": 30.04, - "SummEvalFr (fra-Latn)": 31.14, - "SummEvalFr": 31.85 + "Model": "DanskBERT" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "multilingual-e5-small" + "Model": "DanskBERT" } ] } }, - "bert-base-multilingual-cased": { + "norbert3-base": { "BitextMining": { "f1": [ { - "Model": "bert-base-multilingual-cased" + "Model": "norbert3-base", + "BornholmBitextMining": 6.08 } ] }, "Classification": { "accuracy": [ { - "Model": "bert-base-multilingual-cased", - "AmazonReviewsClassification (fr)": 29.39, - "MTOPDomainClassification (fr)": 63.61, - "MTOPIntentClassification (fr)": 37.84, - "MasakhaNEWSClassification (fra)": 64.0, - "MassiveIntentClassification (fr)": 37.3, - "MassiveScenarioClassification (fr)": 44.47 + "Model": "norbert3-base", + "AngryTweetsClassification": 52.48, + "DKHateClassification": 58.78, + "DanishPoliticalCommentsClassification": 34.14, + "LccSentimentClassification": 54.07, + "MassiveIntentClassification (da)": 53.16, + "MassiveIntentClassification (nb)": 54.2, + "MassiveIntentClassification (sv)": 52.08, + "MassiveScenarioClassification (da)": 57.17, + "MassiveScenarioClassification (nb)": 60.69, + "MassiveScenarioClassification (sv)": 53.53, + "NoRecClassification": 53.4, + "NordicLangClassification": 82.67, + "NorwegianParliament": 59.33, + "ScalaDaClassification": 58.25, + "ScalaNbClassification": 60.19 } ] }, "Clustering": { "v_measure": [ { - "Model": "bert-base-multilingual-cased", - "AlloProfClusteringP2P": 51.5, - "AlloProfClusteringS2S": 43.06, - "HALClusteringS2S": 20.81, - "MLSUMClusteringP2P": 40.9, - "MLSUMClusteringS2S": 31.8, - "MasakhaNEWSClusteringP2P (fra)": 24.23, - "MasakhaNEWSClusteringS2S (fra)": 24.46 + "Model": "norbert3-base" } ] }, "PairClassification": { "ap": [ { - "Model": "bert-base-multilingual-cased", - "OpusparcusPC (fr)": 86.77, - "PawsXPairClassification (fr)": 53.39 + "Model": "norbert3-base" } ] }, "Reranking": { "map": [ { - "Model": "bert-base-multilingual-cased", - "AlloprofReranking": 36.23, - "SyntecReranking": 53.25 + "Model": "norbert3-base" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bert-base-multilingual-cased", - "AlloprofRetrieval": 1.63, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 3.55, - "SyntecRetrieval": 18.95, - "XPQARetrieval (fr)": 18.49 + "Model": "norbert3-base" } ] }, "STS": { "spearman": [ { - "Model": "bert-base-multilingual-cased", - "SICKFr": 58.75, - "STS22 (fr)": 39.05, - "STSBenchmarkMultilingualSTS (fr)": 52.25 + "Model": "norbert3-base" } ] }, "Summarization": { "spearman": [ { - "Model": "bert-base-multilingual-cased", - "SummEvalFr": 28.81 + "Model": "norbert3-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bert-base-multilingual-cased" + "Model": "norbert3-base" } ] } }, - "instructor-large": { 
+ "Baichuan-text-embedding": { "BitextMining": { "f1": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding" } ] }, "Classification": { "accuracy": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding", + "AmazonReviewsClassification (zh)": 48.3, + "IFlyTek": 50.75, + "JDReview": 87.69, + "MassiveIntentClassification (zh-CN)": 74.91, + "MassiveScenarioClassification (zh-CN)": 81.28, + "MultilingualSentiment": 76.83, + "OnlineShopping": 94.42, + "TNews": 52.62, + "Waimai": 88.77 } ] }, "Clustering": { "v_measure": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding", + "CLSClusteringP2P": 60.37, + "CLSClusteringS2S": 51.09, + "ThuNewsClusteringP2P": 58.23, + "ThuNewsClusteringS2S": 57.83 } ] }, "PairClassification": { "ap": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding", + "Cmnli": 85.31, + "Ocnli": 79.33 } ] }, "Reranking": { "map": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding", + "CMedQAv1": 88.06, + "CMedQAv2": 88.46, + "MMarcoReranking": 34.3, + "T2Reranking": 67.85 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "instructor-large", - "BrightRetrieval (pony)": 1.32, - "BrightRetrieval (sustainable_living)": 13.16, - "BrightRetrieval (aops)": 7.94, - "BrightRetrieval (biology)": 15.61, - "BrightRetrieval (stackoverflow)": 11.21, - "BrightRetrieval (theoremqa_theorems)": 9.29, - "BrightRetrieval (psychology)": 21.94, - "BrightRetrieval (economics)": 15.99, - "BrightRetrieval (robotics)": 11.45, - "BrightRetrieval (leetcode)": 20.0, - "BrightRetrieval (earth_science)": 21.52, - "BrightRetrieval (theoremqa_questions)": 20.07 + "Model": "Baichuan-text-embedding", + "CmedqaRetrieval": 47.64, + "CovidRetrieval": 86.86, + "DuRetrieval": 88.43, + "EcomRetrieval": 66.39, + "MMarcoRetrieval": 80.17, + "MedicalRetrieval": 61.1, + "T2Retrieval": 80.11, + "VideoRetrieval": 74.28 } ] }, "STS": { "spearman": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding", + "AFQMC": 50.8, + "ATEC": 53.23, + "BQ": 66.49, + "LCQMC": 76.6, + "PAWSX": 47.56, + "QBQTC": 39.96, + "STS22 (zh)": 65.78, + "STSB": 80.14 } ] }, "Summarization": { "spearman": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "instructor-large" + "Model": "Baichuan-text-embedding" } ] } @@ -23537,1766 +21834,3469 @@ "InstructionRetrieval": { "p-MRR": [ { - "Model": "nomic-embed-text-v1" + "Model": "nomic-embed-text-v1" + } + ] + } + }, + "luotuo-bert-medium": { + "BitextMining": { + "f1": [ + { + "Model": "luotuo-bert-medium" + } + ] + }, + "Classification": { + "accuracy": [ + { + "Model": "luotuo-bert-medium", + "AmazonReviewsClassification (zh)": 34.46, + "IFlyTek": 41.75, + "JDReview": 79.68, + "MassiveIntentClassification (zh-CN)": 57.47, + "MassiveScenarioClassification (zh-CN)": 65.32, + "MultilingualSentiment": 61.21, + "OnlineShopping": 84.3, + "TNews": 45.22, + "Waimai": 79.57 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "luotuo-bert-medium", + "CLSClusteringP2P": 37.01, + "CLSClusteringS2S": 33.46, + "ThuNewsClusteringP2P": 58.83, + "ThuNewsClusteringS2S": 48.26 + } + ] + }, + "PairClassification": { + "ap": [ + { + "Model": "luotuo-bert-medium", + "Cmnli": 72.55, + "Ocnli": 60.7 + } + ] + }, + "Reranking": { + "map": [ + { + "Model": "luotuo-bert-medium", + "CMedQAv1": 57.82, + "CMedQAv2": 58.88, + "MMarcoReranking": 14.55, + "T2Reranking": 65.76 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": 
"luotuo-bert-medium", + "CmedqaRetrieval": 18.04, + "CovidRetrieval": 55.48, + "DuRetrieval": 59.36, + "EcomRetrieval": 40.48, + "MMarcoRetrieval": 55.31, + "MedicalRetrieval": 29.8, + "T2Retrieval": 58.67, + "VideoRetrieval": 38.04 + } + ] + }, + "STS": { + "spearman": [ + { + "Model": "luotuo-bert-medium", + "AFQMC": 22.24, + "ATEC": 30.84, + "BQ": 43.33, + "LCQMC": 66.74, + "PAWSX": 12.31, + "QBQTC": 27.2, + "STS22 (zh)": 66.4, + "STSB": 73.22 + } + ] + }, + "Summarization": { + "spearman": [ + { + "Model": "luotuo-bert-medium" + } + ] + }, + "InstructionRetrieval": { + "p-MRR": [ + { + "Model": "luotuo-bert-medium" } ] } }, - "sup-simcse-bert-base-uncased": { + "all-MiniLM-L12-v2": { "BitextMining": { "f1": [ { - "Model": "sup-simcse-bert-base-uncased" + "Model": "all-MiniLM-L12-v2", + "BornholmBitextMining (dan-Latn)": 35.25, + "Tatoeba (spa-Latn_eng-Latn)": 11.26, + "Tatoeba (bos-Latn_eng-Latn)": 7.05, + "Tatoeba (xho-Latn_eng-Latn)": 3.66, + "Tatoeba (fry-Latn_eng-Latn)": 14.53, + "Tatoeba (tur-Latn_eng-Latn)": 3.69, + "Tatoeba (fao-Latn_eng-Latn)": 5.92, + "Tatoeba (vie-Latn_eng-Latn)": 5.06, + "Tatoeba (ind-Latn_eng-Latn)": 5.3, + "Tatoeba (pol-Latn_eng-Latn)": 4.29, + "Tatoeba (swe-Latn_eng-Latn)": 7.31, + "Tatoeba (ita-Latn_eng-Latn)": 12.57, + "Tatoeba (dtp-Latn_eng-Latn)": 3.31, + "Tatoeba (ron-Latn_eng-Latn)": 8.77, + "Tatoeba (isl-Latn_eng-Latn)": 3.44, + "Tatoeba (hrv-Latn_eng-Latn)": 5.68, + "Tatoeba (cha-Latn_eng-Latn)": 13.07, + "Tatoeba (cor-Latn_eng-Latn)": 2.47, + "Tatoeba (cym-Latn_eng-Latn)": 5.13, + "Tatoeba (jpn-Jpan_eng-Latn)": 2.18, + "Tatoeba (lfn-Latn_eng-Latn)": 7.52, + "Tatoeba (hun-Latn_eng-Latn)": 3.93, + "Tatoeba (lat-Latn_eng-Latn)": 7.14, + "Tatoeba (tgl-Latn_eng-Latn)": 3.34, + "Tatoeba (kur-Latn_eng-Latn)": 7.3, + "Tatoeba (war-Latn_eng-Latn)": 6.18, + "Tatoeba (kab-Latn_eng-Latn)": 0.91, + "Tatoeba (kaz-Cyrl_eng-Latn)": 0.82, + "Tatoeba (slv-Latn_eng-Latn)": 4.52, + "Tatoeba (nds-Latn_eng-Latn)": 11.35, + "Tatoeba (pam-Latn_eng-Latn)": 4.73, + "Tatoeba (bul-Cyrl_eng-Latn)": 0.23, + "Tatoeba (ces-Latn_eng-Latn)": 4.2, + "Tatoeba (nno-Latn_eng-Latn)": 7.45, + "Tatoeba (ben-Beng_eng-Latn)": 0.02, + "Tatoeba (amh-Ethi_eng-Latn)": 0.01, + "Tatoeba (lit-Latn_eng-Latn)": 1.56, + "Tatoeba (pes-Arab_eng-Latn)": 0.3, + "Tatoeba (jav-Latn_eng-Latn)": 3.5, + "Tatoeba (mal-Mlym_eng-Latn)": 0.24, + "Tatoeba (lvs-Latn_eng-Latn)": 3.45, + "Tatoeba (gsw-Latn_eng-Latn)": 9.9, + "Tatoeba (fra-Latn_eng-Latn)": 17.53, + "Tatoeba (orv-Cyrl_eng-Latn)": 0.15, + "Tatoeba (kat-Geor_eng-Latn)": 0.45, + "Tatoeba (awa-Deva_eng-Latn)": 0.44, + "Tatoeba (epo-Latn_eng-Latn)": 8.5, + "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0, + "Tatoeba (dan-Latn_eng-Latn)": 10.21, + "Tatoeba (bel-Cyrl_eng-Latn)": 0.85, + "Tatoeba (nld-Latn_eng-Latn)": 12.56, + "Tatoeba (mkd-Cyrl_eng-Latn)": 0.01, + "Tatoeba (mon-Cyrl_eng-Latn)": 0.06, + "Tatoeba (ast-Latn_eng-Latn)": 9.99, + "Tatoeba (cat-Latn_eng-Latn)": 11.79, + "Tatoeba (oci-Latn_eng-Latn)": 8.72, + "Tatoeba (khm-Khmr_eng-Latn)": 0.42, + "Tatoeba (urd-Arab_eng-Latn)": 0.0, + "Tatoeba (tzl-Latn_eng-Latn)": 6.87, + "Tatoeba (arq-Arab_eng-Latn)": 0.28, + "Tatoeba (uig-Arab_eng-Latn)": 0.4, + "Tatoeba (dsb-Latn_eng-Latn)": 3.06, + "Tatoeba (hsb-Latn_eng-Latn)": 2.89, + "Tatoeba (kzj-Latn_eng-Latn)": 3.64, + "Tatoeba (cbk-Latn_eng-Latn)": 9.76, + "Tatoeba (afr-Latn_eng-Latn)": 7.59, + "Tatoeba (gle-Latn_eng-Latn)": 3.08, + "Tatoeba (csb-Latn_eng-Latn)": 5.21, + "Tatoeba (mar-Deva_eng-Latn)": 0.04, + "Tatoeba (arz-Arab_eng-Latn)": 0.0, + "Tatoeba 
(tat-Cyrl_eng-Latn)": 0.75, + "Tatoeba (hin-Deva_eng-Latn)": 0.0, + "Tatoeba (ang-Latn_eng-Latn)": 14.63, + "Tatoeba (heb-Hebr_eng-Latn)": 0.3, + "Tatoeba (tuk-Latn_eng-Latn)": 2.66, + "Tatoeba (ile-Latn_eng-Latn)": 17.43, + "Tatoeba (zsm-Latn_eng-Latn)": 5.99, + "Tatoeba (kor-Hang_eng-Latn)": 0.9, + "Tatoeba (uzb-Latn_eng-Latn)": 2.2, + "Tatoeba (fin-Latn_eng-Latn)": 3.65, + "Tatoeba (hye-Armn_eng-Latn)": 0.5, + "Tatoeba (ukr-Cyrl_eng-Latn)": 0.57, + "Tatoeba (swh-Latn_eng-Latn)": 5.82, + "Tatoeba (gla-Latn_eng-Latn)": 2.58, + "Tatoeba (aze-Latn_eng-Latn)": 1.47, + "Tatoeba (ara-Arab_eng-Latn)": 0.43, + "Tatoeba (eus-Latn_eng-Latn)": 6.58, + "Tatoeba (deu-Latn_eng-Latn)": 13.89, + "Tatoeba (por-Latn_eng-Latn)": 11.36, + "Tatoeba (ber-Tfng_eng-Latn)": 4.72, + "Tatoeba (sqi-Latn_eng-Latn)": 5.86, + "Tatoeba (pms-Latn_eng-Latn)": 8.94, + "Tatoeba (ina-Latn_eng-Latn)": 25.36, + "Tatoeba (ido-Latn_eng-Latn)": 11.08, + "Tatoeba (slk-Latn_eng-Latn)": 4.2, + "Tatoeba (glg-Latn_eng-Latn)": 12.6, + "Tatoeba (nov-Latn_eng-Latn)": 19.45, + "Tatoeba (tel-Telu_eng-Latn)": 0.67, + "Tatoeba (tam-Taml_eng-Latn)": 0.33, + "Tatoeba (bre-Latn_eng-Latn)": 3.68, + "Tatoeba (tha-Thai_eng-Latn)": 0.67, + "Tatoeba (nob-Latn_eng-Latn)": 8.02, + "Tatoeba (est-Latn_eng-Latn)": 2.6, + "Tatoeba (wuu-Hans_eng-Latn)": 1.89, + "Tatoeba (swg-Latn_eng-Latn)": 11.9, + "Tatoeba (max-Deva_eng-Latn)": 8.4, + "Tatoeba (srp-Cyrl_eng-Latn)": 2.22, + "Tatoeba (yue-Hant_eng-Latn)": 1.89, + "Tatoeba (rus-Cyrl_eng-Latn)": 0.07, + "Tatoeba (ell-Grek_eng-Latn)": 0.2, + "Tatoeba (ceb-Latn_eng-Latn)": 3.95, + "Tatoeba (yid-Hebr_eng-Latn)": 0.19, + "Tatoeba (cmn-Hans_eng-Latn)": 2.45 } ] }, "Classification": { "accuracy": [ { - "Model": "sup-simcse-bert-base-uncased", - "AmazonCounterfactualClassification (en)": 75.75, - "AmazonPolarityClassification": 82.47, - "AmazonReviewsClassification (en)": 39.6, - "Banking77Classification": 75.76, - "EmotionClassification": 44.81, - "ImdbClassification": 73.53, - "MTOPDomainClassification (en)": 84.29, - "MTOPIntentClassification (en)": 63.14, - "MassiveIntentClassification (en)": 65.95, - "MassiveScenarioClassification (en)": 70.78, - "ToxicConversationsClassification": 72.04, - "TweetSentimentExtractionClassification": 59.73 + "Model": "all-MiniLM-L12-v2", + "AllegroReviews (pol-Latn)": 23.85, + "AmazonCounterfactualClassification (en-ext)": 67.24, + "AmazonCounterfactualClassification (en)": 65.28, + "AmazonCounterfactualClassification (deu-Latn)": 57.13, + "AmazonCounterfactualClassification (jpn-Jpan)": 59.94, + "AmazonCounterfactualClassification (de)": 57.1, + "AmazonCounterfactualClassification (ja)": 59.91, + "AmazonPolarityClassification": 62.98, + "AmazonReviewsClassification (en)": 30.79, + "AmazonReviewsClassification (deu-Latn)": 25.92, + "AmazonReviewsClassification (spa-Latn)": 27.64, + "AmazonReviewsClassification (fra-Latn)": 27.53, + "AmazonReviewsClassification (jpn-Jpan)": 23.57, + "AmazonReviewsClassification (cmn-Hans)": 22.99, + "AmazonReviewsClassification (de)": 25.91, + "AmazonReviewsClassification (es)": 27.63, + "AmazonReviewsClassification (fr)": 27.54, + "AmazonReviewsClassification (ja)": 23.57, + "AmazonReviewsClassification (zh)": 22.99, + "AngryTweetsClassification (dan-Latn)": 42.87, + "Banking77Classification": 80.4, + "CBD (pol-Latn)": 48.46, + "DanishPoliticalCommentsClassification (dan-Latn)": 27.07, + "EmotionClassification": 41.17, + "GeoreviewClassification (rus-Cyrl)": 23.49, + "HeadlineClassification (rus-Cyrl)": 28.49, + "IFlyTek (cmn-Hans)": 15.31, + 
"ImdbClassification": 59.76, + "InappropriatenessClassification (rus-Cyrl)": 50.85, + "JDReview (cmn-Hans)": 59.57, + "KinopoiskClassification (rus-Cyrl)": 34.17, + "LccSentimentClassification (dan-Latn)": 41.93, + "MTOPDomainClassification (en)": 91.9, + "MTOPDomainClassification (deu-Latn)": 72.04, + "MTOPDomainClassification (spa-Latn)": 72.99, + "MTOPDomainClassification (fra-Latn)": 75.57, + "MTOPDomainClassification (hin-Deva)": 40.4, + "MTOPDomainClassification (tha-Thai)": 16.36, + "MTOPDomainClassification (de)": 72.04, + "MTOPDomainClassification (es)": 72.99, + "MTOPDomainClassification (fr)": 75.59, + "MTOPDomainClassification (hi)": 40.36, + "MTOPDomainClassification (th)": 17.1, + "MTOPIntentClassification (en)": 62.84, + "MTOPIntentClassification (deu-Latn)": 43.42, + "MTOPIntentClassification (spa-Latn)": 41.91, + "MTOPIntentClassification (fra-Latn)": 38.96, + "MTOPIntentClassification (hin-Deva)": 17.76, + "MTOPIntentClassification (tha-Thai)": 6.13, + "MTOPIntentClassification (de)": 43.41, + "MTOPIntentClassification (es)": 41.88, + "MTOPIntentClassification (fr)": 38.94, + "MTOPIntentClassification (hi)": 17.75, + "MTOPIntentClassification (th)": 5.63, + "MasakhaNEWSClassification (amh-Ethi)": 30.64, + "MasakhaNEWSClassification (eng)": 76.62, + "MasakhaNEWSClassification (fra-Latn)": 67.18, + "MasakhaNEWSClassification (hau-Latn)": 52.59, + "MasakhaNEWSClassification (ibo-Latn)": 54.26, + "MasakhaNEWSClassification (lin-Latn)": 62.23, + "MasakhaNEWSClassification (lug-Latn)": 47.62, + "MasakhaNEWSClassification (orm-Ethi)": 47.17, + "MasakhaNEWSClassification (pcm-Latn)": 91.77, + "MasakhaNEWSClassification (run-Latn)": 54.47, + "MasakhaNEWSClassification (sna-Latn)": 66.53, + "MasakhaNEWSClassification (som-Latn)": 40.27, + "MasakhaNEWSClassification (swa-Latn)": 47.77, + "MasakhaNEWSClassification (tir-Ethi)": 21.18, + "MasakhaNEWSClassification (xho-Latn)": 54.34, + "MasakhaNEWSClassification (yor-Latn)": 58.61, + "MasakhaNEWSClassification (fra)": 72.2, + "MassiveIntentClassification (jpn-Jpan)": 30.89, + "MassiveIntentClassification (khm-Khmr)": 4.99, + "MassiveIntentClassification (slv-Latn)": 38.48, + "MassiveIntentClassification (hye-Armn)": 8.69, + "MassiveIntentClassification (ita-Latn)": 43.16, + "MassiveIntentClassification (fin-Latn)": 39.19, + "MassiveIntentClassification (afr-Latn)": 38.84, + "MassiveIntentClassification (kor-Kore)": 19.97, + "MassiveIntentClassification (ben-Beng)": 13.7, + "MassiveIntentClassification (heb-Hebr)": 23.71, + "MassiveIntentClassification (dan-Latn)": 44.35, + "MassiveIntentClassification (fra-Latn)": 44.75, + "MassiveIntentClassification (pol-Latn)": 37.59, + "MassiveIntentClassification (por-Latn)": 45.08, + "MassiveIntentClassification (tha-Thai)": 10.46, + "MassiveIntentClassification (nob-Latn)": 41.79, + "MassiveIntentClassification (kat-Geor)": 9.17, + "MassiveIntentClassification (tgl-Latn)": 38.63, + "MassiveIntentClassification (swe-Latn)": 40.33, + "MassiveIntentClassification (hun-Latn)": 37.95, + "MassiveIntentClassification (cmo-Hant)": 22.38, + "MassiveIntentClassification (hin-Deva)": 18.0, + "MassiveIntentClassification (tur-Latn)": 35.93, + "MassiveIntentClassification (vie-Latn)": 37.35, + "MassiveIntentClassification (mal-Mlym)": 2.83, + "MassiveIntentClassification (aze-Latn)": 34.3, + "MassiveIntentClassification (amh-Ethi)": 2.45, + "MassiveIntentClassification (kan-Knda)": 3.07, + "MassiveIntentClassification (deu-Latn)": 44.12, + "MassiveIntentClassification (rus-Cyrl)": 26.29, + 
"MassiveIntentClassification (ara-Arab)": 21.02, + "MassiveIntentClassification (msa-Latn)": 36.16, + "MassiveIntentClassification (nld-Latn)": 41.77, + "MassiveIntentClassification (fas-Arab)": 23.56, + "MassiveIntentClassification (isl-Latn)": 35.17, + "MassiveIntentClassification (cym-Latn)": 35.65, + "MassiveIntentClassification (cmo-Hans)": 23.74, + "MassiveIntentClassification (ell-Grek)": 28.68, + "MassiveIntentClassification (spa-Latn)": 40.82, + "MassiveIntentClassification (ind-Latn)": 39.65, + "MassiveIntentClassification (jav-Latn)": 36.67, + "MassiveIntentClassification (mon-Cyrl)": 23.27, + "MassiveIntentClassification (mya-Mymr)": 4.36, + "MassiveIntentClassification (sqi-Latn)": 41.47, + "MassiveIntentClassification (tel-Telu)": 2.54, + "MassiveIntentClassification (en)": 67.15, + "MassiveIntentClassification (ron-Latn)": 41.64, + "MassiveIntentClassification (tam-Taml)": 13.12, + "MassiveIntentClassification (swa-Latn)": 35.26, + "MassiveIntentClassification (urd-Arab)": 16.26, + "MassiveIntentClassification (lav-Latn)": 38.54, + "MassiveIntentClassification (af)": 38.94, + "MassiveIntentClassification (am)": 2.45, + "MassiveIntentClassification (ar)": 20.94, + "MassiveIntentClassification (az)": 34.25, + "MassiveIntentClassification (bn)": 13.67, + "MassiveIntentClassification (cy)": 35.71, + "MassiveIntentClassification (da)": 44.43, + "MassiveIntentClassification (de)": 44.17, + "MassiveIntentClassification (el)": 28.7, + "MassiveIntentClassification (es)": 40.91, + "MassiveIntentClassification (fa)": 23.52, + "MassiveIntentClassification (fi)": 39.27, + "MassiveIntentClassification (fr)": 44.82, + "MassiveIntentClassification (he)": 23.65, + "MassiveIntentClassification (hi)": 17.98, + "MassiveIntentClassification (hu)": 38.0, + "MassiveIntentClassification (hy)": 8.69, + "MassiveIntentClassification (id)": 39.66, + "MassiveIntentClassification (is)": 35.14, + "MassiveIntentClassification (it)": 43.17, + "MassiveIntentClassification (ja)": 30.94, + "MassiveIntentClassification (jv)": 36.69, + "MassiveIntentClassification (ka)": 9.17, + "MassiveIntentClassification (km)": 4.99, + "MassiveIntentClassification (kn)": 3.08, + "MassiveIntentClassification (ko)": 19.97, + "MassiveIntentClassification (lv)": 38.61, + "MassiveIntentClassification (ml)": 2.85, + "MassiveIntentClassification (mn)": 23.25, + "MassiveIntentClassification (ms)": 36.21, + "MassiveIntentClassification (my)": 4.38, + "MassiveIntentClassification (nb)": 41.91, + "MassiveIntentClassification (nl)": 41.85, + "MassiveIntentClassification (pl)": 37.63, + "MassiveIntentClassification (pt)": 45.12, + "MassiveIntentClassification (ro)": 41.71, + "MassiveIntentClassification (ru)": 26.33, + "MassiveIntentClassification (sl)": 38.52, + "MassiveIntentClassification (sq)": 41.62, + "MassiveIntentClassification (sv)": 40.42, + "MassiveIntentClassification (sw)": 35.28, + "MassiveIntentClassification (ta)": 13.1, + "MassiveIntentClassification (te)": 2.56, + "MassiveIntentClassification (th)": 10.54, + "MassiveIntentClassification (tl)": 38.56, + "MassiveIntentClassification (tr)": 35.9, + "MassiveIntentClassification (ur)": 16.18, + "MassiveIntentClassification (vi)": 37.38, + "MassiveIntentClassification (zh-CN)": 23.74, + "MassiveIntentClassification (zh-TW)": 22.39, + "MassiveScenarioClassification (jav-Latn)": 44.54, + "MassiveScenarioClassification (aze-Latn)": 39.62, + "MassiveScenarioClassification (cmo-Hans)": 33.19, + "MassiveScenarioClassification (swa-Latn)": 43.18, + "MassiveScenarioClassification 
(fra-Latn)": 53.77, + "MassiveScenarioClassification (mon-Cyrl)": 29.01, + "MassiveScenarioClassification (kat-Geor)": 14.85, + "MassiveScenarioClassification (ben-Beng)": 18.98, + "MassiveScenarioClassification (ind-Latn)": 44.37, + "MassiveScenarioClassification (kor-Kore)": 25.72, + "MassiveScenarioClassification (lav-Latn)": 42.75, + "MassiveScenarioClassification (deu-Latn)": 52.08, + "MassiveScenarioClassification (hun-Latn)": 44.1, + "MassiveScenarioClassification (tam-Taml)": 19.4, + "MassiveScenarioClassification (afr-Latn)": 45.72, + "MassiveScenarioClassification (nob-Latn)": 47.35, + "MassiveScenarioClassification (urd-Arab)": 24.45, + "MassiveScenarioClassification (tha-Thai)": 18.32, + "MassiveScenarioClassification (ita-Latn)": 51.7, + "MassiveScenarioClassification (en)": 74.58, + "MassiveScenarioClassification (sqi-Latn)": 49.12, + "MassiveScenarioClassification (mya-Mymr)": 10.06, + "MassiveScenarioClassification (ara-Arab)": 27.66, + "MassiveScenarioClassification (tur-Latn)": 41.8, + "MassiveScenarioClassification (khm-Khmr)": 9.75, + "MassiveScenarioClassification (cym-Latn)": 41.43, + "MassiveScenarioClassification (cmo-Hant)": 31.14, + "MassiveScenarioClassification (hye-Armn)": 14.87, + "MassiveScenarioClassification (ell-Grek)": 35.55, + "MassiveScenarioClassification (ron-Latn)": 49.94, + "MassiveScenarioClassification (kan-Knda)": 8.32, + "MassiveScenarioClassification (jpn-Jpan)": 36.77, + "MassiveScenarioClassification (fin-Latn)": 45.8, + "MassiveScenarioClassification (swe-Latn)": 46.81, + "MassiveScenarioClassification (dan-Latn)": 49.5, + "MassiveScenarioClassification (msa-Latn)": 44.67, + "MassiveScenarioClassification (hin-Deva)": 23.03, + "MassiveScenarioClassification (tgl-Latn)": 48.29, + "MassiveScenarioClassification (pol-Latn)": 44.74, + "MassiveScenarioClassification (isl-Latn)": 43.11, + "MassiveScenarioClassification (por-Latn)": 53.0, + "MassiveScenarioClassification (slv-Latn)": 42.24, + "MassiveScenarioClassification (rus-Cyrl)": 28.77, + "MassiveScenarioClassification (tel-Telu)": 7.74, + "MassiveScenarioClassification (heb-Hebr)": 25.73, + "MassiveScenarioClassification (fas-Arab)": 29.0, + "MassiveScenarioClassification (vie-Latn)": 40.97, + "MassiveScenarioClassification (nld-Latn)": 49.14, + "MassiveScenarioClassification (spa-Latn)": 50.73, + "MassiveScenarioClassification (mal-Mlym)": 7.25, + "MassiveScenarioClassification (amh-Ethi)": 7.41, + "MassiveScenarioClassification (af)": 45.71, + "MassiveScenarioClassification (am)": 7.41, + "MassiveScenarioClassification (ar)": 27.62, + "MassiveScenarioClassification (az)": 39.58, + "MassiveScenarioClassification (bn)": 18.98, + "MassiveScenarioClassification (cy)": 41.4, + "MassiveScenarioClassification (da)": 49.47, + "MassiveScenarioClassification (de)": 52.07, + "MassiveScenarioClassification (el)": 35.51, + "MassiveScenarioClassification (es)": 50.74, + "MassiveScenarioClassification (fa)": 29.0, + "MassiveScenarioClassification (fi)": 45.8, + "MassiveScenarioClassification (fr)": 53.76, + "MassiveScenarioClassification (he)": 25.68, + "MassiveScenarioClassification (hi)": 23.02, + "MassiveScenarioClassification (hu)": 44.09, + "MassiveScenarioClassification (hy)": 14.83, + "MassiveScenarioClassification (id)": 44.35, + "MassiveScenarioClassification (is)": 43.08, + "MassiveScenarioClassification (it)": 51.71, + "MassiveScenarioClassification (ja)": 36.75, + "MassiveScenarioClassification (jv)": 44.57, + "MassiveScenarioClassification (ka)": 14.84, + "MassiveScenarioClassification 
(km)": 9.75, + "MassiveScenarioClassification (kn)": 8.32, + "MassiveScenarioClassification (ko)": 25.72, + "MassiveScenarioClassification (lv)": 42.75, + "MassiveScenarioClassification (ml)": 7.25, + "MassiveScenarioClassification (mn)": 29.03, + "MassiveScenarioClassification (ms)": 44.65, + "MassiveScenarioClassification (my)": 10.07, + "MassiveScenarioClassification (nb)": 47.36, + "MassiveScenarioClassification (nl)": 49.15, + "MassiveScenarioClassification (pl)": 44.72, + "MassiveScenarioClassification (pt)": 53.0, + "MassiveScenarioClassification (ro)": 49.97, + "MassiveScenarioClassification (ru)": 28.75, + "MassiveScenarioClassification (sl)": 42.26, + "MassiveScenarioClassification (sq)": 49.14, + "MassiveScenarioClassification (sv)": 46.83, + "MassiveScenarioClassification (sw)": 43.18, + "MassiveScenarioClassification (ta)": 19.38, + "MassiveScenarioClassification (te)": 7.74, + "MassiveScenarioClassification (th)": 18.32, + "MassiveScenarioClassification (tl)": 48.31, + "MassiveScenarioClassification (tr)": 41.79, + "MassiveScenarioClassification (ur)": 24.46, + "MassiveScenarioClassification (vi)": 40.94, + "MassiveScenarioClassification (zh-CN)": 33.18, + "MassiveScenarioClassification (zh-TW)": 31.16, + "MultilingualSentiment (cmn-Hans)": 40.52, + "NoRecClassification (nob-Latn)": 37.73, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.17, + "OnlineShopping (cmn-Hans)": 58.65, + "PAC (pol-Latn)": 59.53, + "PolEmo2.0-IN (pol-Latn)": 38.32, + "PolEmo2.0-OUT (pol-Latn)": 22.98, + "RuReviewsClassification (rus-Cyrl)": 42.49, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.49, + "RuSciBenchOECDClassification (rus-Cyrl)": 8.31, + "TNews (cmn-Hans)": 20.37, + "ToxicConversationsClassification": 67.47, + "TweetSentimentExtractionClassification": 54.25, + "Waimai (cmn-Hans)": 63.48 } ] }, "Clustering": { "v_measure": [ { - "Model": "sup-simcse-bert-base-uncased", - "ArxivClusteringP2P": 35.18, - "ArxivClusteringS2S": 27.54, - "BiorxivClusteringP2P": 30.15, - "BiorxivClusteringS2S": 24.67, - "MedrxivClusteringP2P": 26.25, - "MedrxivClusteringS2S": 24.12, - "RedditClustering": 40.23, - "RedditClusteringP2P": 47.74, - "StackExchangeClustering": 47.55, - "StackExchangeClusteringP2P": 29.45, - "TwentyNewsgroupsClustering": 34.86 + "Model": "all-MiniLM-L12-v2", + "AlloProfClusteringP2P": 46.03, + "AlloProfClusteringS2S": 31.83, + "ArxivClusteringP2P": 46.07, + "ArxivClusteringS2S": 37.5, + "BiorxivClusteringP2P": 36.99, + "BiorxivClusteringS2S": 33.21, + "GeoreviewClusteringP2P (rus-Cyrl)": 20.76, + "HALClusteringS2S": 19.58, + "MLSUMClusteringP2P": 34.35, + "MLSUMClusteringS2S": 29.3, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.5, + "MasakhaNEWSClusteringP2P (eng)": 55.86, + "MasakhaNEWSClusteringP2P (fra-Latn)": 42.72, + "MasakhaNEWSClusteringP2P (hau-Latn)": 26.61, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.26, + "MasakhaNEWSClusteringP2P (lin-Latn)": 54.52, + "MasakhaNEWSClusteringP2P (lug-Latn)": 43.87, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.87, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 74.42, + "MasakhaNEWSClusteringP2P (run-Latn)": 51.73, + "MasakhaNEWSClusteringP2P (sna-Latn)": 46.89, + "MasakhaNEWSClusteringP2P (som-Latn)": 31.17, + "MasakhaNEWSClusteringP2P (swa-Latn)": 23.72, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 44.08, + "MasakhaNEWSClusteringP2P (xho-Latn)": 26.97, + "MasakhaNEWSClusteringP2P (yor-Latn)": 32.51, + "MasakhaNEWSClusteringP2P (fra)": 42.72, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.11, + 
"MasakhaNEWSClusteringS2S (eng)": 40.71, + "MasakhaNEWSClusteringS2S (fra-Latn)": 32.47, + "MasakhaNEWSClusteringS2S (hau-Latn)": 20.63, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.33, + "MasakhaNEWSClusteringS2S (lin-Latn)": 54.52, + "MasakhaNEWSClusteringS2S (lug-Latn)": 51.42, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.84, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 70.72, + "MasakhaNEWSClusteringS2S (run-Latn)": 50.88, + "MasakhaNEWSClusteringS2S (sna-Latn)": 46.6, + "MasakhaNEWSClusteringS2S (som-Latn)": 29.87, + "MasakhaNEWSClusteringS2S (swa-Latn)": 10.82, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 43.63, + "MasakhaNEWSClusteringS2S (xho-Latn)": 24.55, + "MasakhaNEWSClusteringS2S (yor-Latn)": 32.85, + "MasakhaNEWSClusteringS2S (fra)": 32.47, + "MedrxivClusteringP2P": 34.25, + "MedrxivClusteringS2S": 32.24, + "RedditClustering": 51.18, + "RedditClusteringP2P": 54.8, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.65, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 10.19, + "StackExchangeClustering": 53.05, + "StackExchangeClusteringP2P": 33.13, + "TwentyNewsgroupsClustering": 47.47 } ] }, "PairClassification": { "ap": [ { - "Model": "sup-simcse-bert-base-uncased", - "SprintDuplicateQuestions": 69.39, - "TwitterSemEval2015": 67.75, - "TwitterURLCorpus": 83.89 + "Model": "all-MiniLM-L12-v2", + "CDSC-E (pol-Latn)": 49.04, + "OpusparcusPC (deu-Latn)": 91.2, + "OpusparcusPC (en)": 97.41, + "OpusparcusPC (fin-Latn)": 85.99, + "OpusparcusPC (fra-Latn)": 87.35, + "OpusparcusPC (rus-Cyrl)": 79.23, + "OpusparcusPC (swe-Latn)": 84.87, + "PSC (pol-Latn)": 87.92, + "PawsXPairClassification (deu-Latn)": 50.83, + "PawsXPairClassification (en)": 58.62, + "PawsXPairClassification (spa-Latn)": 52.08, + "PawsXPairClassification (fra-Latn)": 55.54, + "PawsXPairClassification (jpn-Hira)": 47.75, + "PawsXPairClassification (kor-Hang)": 49.59, + "PawsXPairClassification (cmn-Hans)": 52.8, + "SICK-E-PL (pol-Latn)": 49.63, + "SprintDuplicateQuestions": 92.45, + "TERRa (rus-Cyrl)": 46.4, + "TwitterSemEval2015": 70.02, + "TwitterURLCorpus": 84.77 } ] }, "Reranking": { "map": [ { - "Model": "sup-simcse-bert-base-uncased", - "AskUbuntuDupQuestions": 51.8, - "MindSmallReranking": 29.3, - "SciDocsRR": 70.14, - "StackOverflowDupQuestions": 38.9 + "Model": "all-MiniLM-L12-v2", + "AlloprofReranking (fra-Latn)": 67.01, + "AskUbuntuDupQuestions": 64.06, + "MMarcoReranking (cmn-Hans)": 5.27, + "MindSmallReranking": 31.02, + "RuBQReranking (rus-Cyrl)": 38.51, + "SciDocsRR": 87.2, + "StackOverflowDupQuestions": 51.47, + "SyntecReranking (fra-Latn)": 69.17, + "T2Reranking (cmn-Hans)": 60.32 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "sup-simcse-bert-base-uncased", - "ArguAna": 38.33, - "CQADupstackRetrieval": 14.5, - "ClimateFEVER": 11.98, - "DBPedia": 19.73, - "FEVER": 20.41, - "FiQA2018": 10.41, - "HotpotQA": 22.9, - "MSMARCO": 11.0, - "NFCorpus": 12.42, - "NQ": 16.08, - "QuoraRetrieval": 79.62, - "SCIDOCS": 7.53, - "SciFact": 29.59, - "TRECCOVID": 22.93, - "Touche2020": 9.9 + "Model": "all-MiniLM-L12-v2", + "AILACasedocs": 16.8, + "AILAStatutes": 20.71, + "ARCChallenge": 10.23, + "AlloprofRetrieval (fra-Latn)": 33.2, + "AlloprofRetrieval": 33.2, + "AlphaNLI": 25.35, + "ArguAna": 47.13, + "ArguAna-PL (pol-Latn)": 13.4, + "BSARDRetrieval (fra-Latn)": 6.24, + "CQADupstackRetrieval": 42.53, + "ClimateFEVER": 21.57, + "CmedqaRetrieval (cmn-Hans)": 2.58, + "CovidRetrieval (cmn-Hans)": 10.79, + "DBPedia": 33.35, + "DuRetrieval (cmn-Hans)": 6.62, + "EcomRetrieval (cmn-Hans)": 4.01, + "FEVER": 55.9, + "FiQA-PL (pol-Latn)": 5.82, 
+ "FiQA2018": 37.27, + "GerDaLIRSmall (deu-Latn)": 1.35, + "HellaSwag": 24.08, + "HotpotQA": 44.59, + "LEMBNarrativeQARetrieval": 19.64, + "LEMBNeedleRetrieval": 12.25, + "LEMBPasskeyRetrieval": 14.75, + "LEMBQMSumRetrieval": 13.08, + "LEMBSummScreenFDRetrieval": 46.98, + "LEMBWikimQARetrieval": 44.88, + "LeCaRDv2 (zho-Hans)": 18.77, + "LegalBenchConsumerContractsQA": 60.21, + "LegalBenchCorporateLobbying": 88.69, + "LegalQuAD (deu-Latn)": 7.44, + "LegalSummarization": 57.43, + "MMarcoRetrieval (cmn-Hans)": 7.46, + "MSMARCO": 39.03, + "MedicalRetrieval (cmn-Hans)": 2.3, + "MintakaRetrieval (ara-Arab)": 2.74, + "MintakaRetrieval (deu-Latn)": 20.04, + "MintakaRetrieval (spa-Latn)": 11.76, + "MintakaRetrieval (fra-Latn)": 16.08, + "MintakaRetrieval (hin-Deva)": 3.04, + "MintakaRetrieval (ita-Latn)": 11.83, + "MintakaRetrieval (jpn-Hira)": 7.31, + "MintakaRetrieval (por-Latn)": 13.66, + "NFCorpus": 32.25, + "NFCorpus-PL (pol-Latn)": 15.43, + "NQ": 46.47, + "PIQA": 26.44, + "Quail": 3.08, + "QuoraRetrieval": 87.75, + "RARbCode": 42.44, + "RARbMath": 66.36, + "RuBQRetrieval (rus-Cyrl)": 8.84, + "SCIDOCS": 21.82, + "SCIDOCS-PL (pol-Latn)": 5.34, + "SIQA": 2.09, + "SciFact": 62.64, + "SciFact-PL (pol-Latn)": 22.48, + "SpartQA": 2.67, + "SyntecRetrieval (fra-Latn)": 60.8, + "T2Retrieval (cmn-Hans)": 4.82, + "TRECCOVID": 50.82, + "TRECCOVID-PL (pol-Latn)": 16.52, + "TempReasonL1": 1.66, + "TempReasonL2Fact": 10.31, + "TempReasonL2Pure": 0.63, + "TempReasonL3Fact": 11.11, + "TempReasonL3Pure": 6.63, + "Touche2020": 17.22, + "VideoRetrieval (cmn-Hans)": 9.38, + "WinoGrande": 27.2, + "XPQARetrieval (ara-Arab_ara-Arab)": 7.83, + "XPQARetrieval (eng-Latn_ara-Arab)": 2.52, + "XPQARetrieval (ara-Arab_eng-Latn)": 8.88, + "XPQARetrieval (deu-Latn_deu-Latn)": 56.77, + "XPQARetrieval (eng-Latn_deu-Latn)": 18.2, + "XPQARetrieval (deu-Latn_eng-Latn)": 30.06, + "XPQARetrieval (spa-Latn_spa-Latn)": 42.22, + "XPQARetrieval (eng-Latn_spa-Latn)": 7.53, + "XPQARetrieval (spa-Latn_eng-Latn)": 26.27, + "XPQARetrieval (fra-Latn_fra-Latn)": 55.9, + "XPQARetrieval (eng-Latn_fra-Latn)": 14.89, + "XPQARetrieval (fra-Latn_eng-Latn)": 34.2, + "XPQARetrieval (hin-Deva_hin-Deva)": 33.26, + "XPQARetrieval (eng-Latn_hin-Deva)": 6.44, + "XPQARetrieval (hin-Deva_eng-Latn)": 6.98, + "XPQARetrieval (ita-Latn_ita-Latn)": 58.68, + "XPQARetrieval (eng-Latn_ita-Latn)": 8.56, + "XPQARetrieval (ita-Latn_eng-Latn)": 28.71, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.53, + "XPQARetrieval (eng-Latn_jpn-Hira)": 5.7, + "XPQARetrieval (jpn-Hira_eng-Latn)": 13.75, + "XPQARetrieval (kor-Hang_kor-Hang)": 13.48, + "XPQARetrieval (eng-Latn_kor-Hang)": 7.43, + "XPQARetrieval (kor-Hang_eng-Latn)": 7.34, + "XPQARetrieval (pol-Latn_pol-Latn)": 28.07, + "XPQARetrieval (eng-Latn_pol-Latn)": 10.03, + "XPQARetrieval (pol-Latn_eng-Latn)": 16.58, + "XPQARetrieval (por-Latn_por-Latn)": 34.09, + "XPQARetrieval (eng-Latn_por-Latn)": 7.38, + "XPQARetrieval (por-Latn_eng-Latn)": 22.59, + "XPQARetrieval (tam-Taml_tam-Taml)": 9.13, + "XPQARetrieval (eng-Latn_tam-Taml)": 4.15, + "XPQARetrieval (tam-Taml_eng-Latn)": 3.76, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 21.09, + "XPQARetrieval (eng-Latn_cmn-Hans)": 6.58, + "XPQARetrieval (cmn-Hans_eng-Latn)": 9.39, + "XPQARetrieval (fr)": 55.9 } ] }, "STS": { "spearman": [ { - "Model": "sup-simcse-bert-base-uncased", - "BIOSSES": 68.38, - "SICK-R": 80.77, - "STS12": 75.3, - "STS13": 84.67, - "STS14": 80.19, - "STS15": 85.4, - "STS16": 80.82, - "STS17 (en-en)": 89.44, - "STS22 (en)": 61.96, - "STSBenchmark": 84.25 + "Model": 
"all-MiniLM-L12-v2", + "AFQMC (cmn-Hans)": 7.94, + "ATEC (cmn-Hans)": 12.97, + "BIOSSES": 83.57, + "BQ (cmn-Hans)": 23.31, + "CDSC-R (pol-Latn)": 82.5, + "LCQMC (cmn-Hans)": 21.04, + "PAWSX (cmn-Hans)": 7.31, + "RUParaPhraserSTS (rus-Cyrl)": 45.47, + "RuSTSBenchmarkSTS (rus-Cyrl)": 56.33, + "SICK-R": 79.32, + "SICK-R-PL (pol-Latn)": 54.26, + "SICKFr (fra-Latn)": 63.16, + "STS12": 73.08, + "STS13": 82.13, + "STS14": 76.73, + "STS15": 85.58, + "STS16": 80.23, + "STS17 (nld-Latn_eng-Latn)": 24.51, + "STS17 (eng-Latn_ara-Arab)": 0.54, + "STS17 (ara-Arab)": 58.71, + "STS17 (kor-Hang)": 43.37, + "STS17 (eng-Latn_tur-Latn)": 0.43, + "STS17 (ita-Latn_eng-Latn)": 24.28, + "STS17 (eng-Latn_deu-Latn)": 27.54, + "STS17 (fra-Latn_eng-Latn)": 30.7, + "STS17 (spa-Latn)": 78.37, + "STS17 (en-en)": 88.63, + "STS17 (spa-Latn_eng-Latn)": 22.01, + "STS17 (ar-ar)": 58.71, + "STS17 (en-ar)": 0.54, + "STS17 (en-de)": 27.54, + "STS17 (en-tr)": 0.43, + "STS17 (es-en)": 22.01, + "STS17 (es-es)": 78.37, + "STS17 (fr-en)": 30.7, + "STS17 (it-en)": 24.28, + "STS17 (ko-ko)": 43.37, + "STS17 (nl-en)": 24.51, + "STS22 (ara-Arab)": 17.54, + "STS22 (cmn-Hans)": 33.15, + "STS22 (fra-Latn)": 69.51, + "STS22 (deu-Latn_eng-Latn)": 42.86, + "STS22 (pol-Latn)": 19.22, + "STS22 (spa-Latn_eng-Latn)": 53.99, + "STS22 (pol-Latn_eng-Latn)": 42.67, + "STS22 (tur-Latn)": 21.6, + "STS22 (deu-Latn_fra-Latn)": 43.52, + "STS22 (fra-Latn_pol-Latn)": 16.9, + "STS22 (deu-Latn)": 22.53, + "STS22 (deu-Latn_pol-Latn)": 1.63, + "STS22 (en)": 65.67, + "STS22 (spa-Latn)": 43.98, + "STS22 (cmn-Hans_eng-Latn)": 44.39, + "STS22 (spa-Latn_ita-Latn)": 40.71, + "STS22 (ita-Latn)": 47.48, + "STS22 (rus-Cyrl)": 11.19, + "STS22 (ar)": 17.54, + "STS22 (de)": 22.53, + "STS22 (de-en)": 42.86, + "STS22 (de-fr)": 43.52, + "STS22 (de-pl)": 1.63, + "STS22 (es)": 43.98, + "STS22 (es-en)": 53.99, + "STS22 (es-it)": 40.71, + "STS22 (fr)": 69.51, + "STS22 (fr-pl)": 16.9, + "STS22 (it)": 47.48, + "STS22 (pl)": 19.22, + "STS22 (pl-en)": 42.67, + "STS22 (ru)": 11.19, + "STS22 (tr)": 21.6, + "STS22 (zh)": 33.15, + "STS22 (zh-en)": 44.39, + "STSB (cmn-Hans)": 36.66, + "STSBenchmark": 83.09, + "STSBenchmarkMultilingualSTS (nld-Latn)": 60.03, + "STSBenchmarkMultilingualSTS (spa-Latn)": 65.33, + "STSBenchmarkMultilingualSTS (ita-Latn)": 60.71, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 38.93, + "STSBenchmarkMultilingualSTS (en)": 83.09, + "STSBenchmarkMultilingualSTS (por-Latn)": 63.85, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 56.09, + "STSBenchmarkMultilingualSTS (fra-Latn)": 66.68, + "STSBenchmarkMultilingualSTS (pol-Latn)": 60.2, + "STSBenchmarkMultilingualSTS (deu-Latn)": 63.28 } ] }, "Summarization": { "spearman": [ { - "Model": "sup-simcse-bert-base-uncased", - "SummEval": 31.17 + "Model": "all-MiniLM-L12-v2", + "SummEval": 27.9, + "SummEvalFr (fra-Latn)": 26.63 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "sup-simcse-bert-base-uncased" + "Model": "all-MiniLM-L12-v2" } ] } }, - "text-similarity-babbage-001": { + "instructor-large": { "BitextMining": { "f1": [ { - "Model": "text-similarity-babbage-001" + "Model": "instructor-large" } ] }, "Classification": { "accuracy": [ { - "Model": "text-similarity-babbage-001" + "Model": "instructor-large" } ] }, "Clustering": { - "v_measure": [ - { - "Model": "text-similarity-babbage-001", - "RedditClustering": 45.64, - "StackExchangeClustering": 53.01, - "TwentyNewsgroupsClustering": 42.01 + "v_measure": [ + { + "Model": "instructor-large" } ] }, "PairClassification": { "ap": [ { - "Model": 
"text-similarity-babbage-001", - "SprintDuplicateQuestions": 76.46, - "TwitterSemEval2015": 70.85, - "TwitterURLCorpus": 85.08 + "Model": "instructor-large" } ] }, "Reranking": { "map": [ { - "Model": "text-similarity-babbage-001", - "AskUbuntuDupQuestions": 54.68, - "SciDocsRR": 72.78, - "StackOverflowDupQuestions": 40.65 + "Model": "instructor-large" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "text-similarity-babbage-001" + "Model": "instructor-large", + "BrightRetrieval (pony)": 1.32, + "BrightRetrieval (sustainable_living)": 13.16, + "BrightRetrieval (aops)": 7.94, + "BrightRetrieval (biology)": 15.61, + "BrightRetrieval (stackoverflow)": 11.21, + "BrightRetrieval (theoremqa_theorems)": 9.29, + "BrightRetrieval (psychology)": 21.94, + "BrightRetrieval (economics)": 15.99, + "BrightRetrieval (robotics)": 11.45, + "BrightRetrieval (leetcode)": 20.0, + "BrightRetrieval (earth_science)": 21.52, + "BrightRetrieval (theoremqa_questions)": 20.07 } ] }, "STS": { "spearman": [ { - "Model": "text-similarity-babbage-001", - "BIOSSES": 78.12, - "SICK-R": 77.02, - "STSBenchmark": 84.32 + "Model": "instructor-large" } ] }, "Summarization": { "spearman": [ { - "Model": "text-similarity-babbage-001" + "Model": "instructor-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "text-similarity-babbage-001" + "Model": "instructor-large" } ] } }, - "LLM2Vec-Meta-Llama-3-unsupervised": { + "google-gecko-256.text-embedding-preview-0409": { "BitextMining": { "f1": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised" + "Model": "google-gecko-256.text-embedding-preview-0409" } ] }, "Classification": { "accuracy": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "AmazonCounterfactualClassification (en)": 75.7, - "AmazonPolarityClassification": 80.68, - "AmazonReviewsClassification (en)": 40.0, - "Banking77Classification": 84.77, - "EmotionClassification": 47.08, - "ImdbClassification": 75.19, - "MTOPDomainClassification (en)": 94.47, - "MTOPIntentClassification (en)": 81.09, - "MassiveIntentClassification (en)": 75.01, - "MassiveScenarioClassification (en)": 79.16, - "ToxicConversationsClassification": 71.85, - "TweetSentimentExtractionClassification": 57.61 + "Model": "google-gecko-256.text-embedding-preview-0409", + "AmazonCounterfactualClassification (en)": 70.93, + "AmazonPolarityClassification": 97.34, + "AmazonReviewsClassification (en)": 48.47, + "Banking77Classification": 86.01, + "EmotionClassification": 51.53, + "ImdbClassification": 95.7, + "MTOPDomainClassification (en)": 98.02, + "MTOPIntentClassification (en)": 77.82, + "MassiveIntentClassification (en)": 75.67, + "MassiveScenarioClassification (en)": 85.16, + "ToxicConversationsClassification": 88.33, + "TweetSentimentExtractionClassification": 72.97 } ] }, "Clustering": { "v_measure": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "ArxivClusteringP2P": 49.22, - "ArxivClusteringS2S": 41.71, - "BiorxivClusteringP2P": 38.39, - "BiorxivClusteringS2S": 31.31, - "MedrxivClusteringP2P": 31.47, - "MedrxivClusteringS2S": 27.87, - "RedditClustering": 43.67, - "RedditClusteringP2P": 61.67, - "StackExchangeClustering": 68.2, - "StackExchangeClusteringP2P": 36.36, - "TwentyNewsgroupsClustering": 32.01 + "Model": "google-gecko-256.text-embedding-preview-0409", + "ArxivClusteringP2P": 44.12, + "ArxivClusteringS2S": 36.54, + "BiorxivClusteringP2P": 36.28, + "BiorxivClusteringS2S": 33.09, + "MedrxivClusteringP2P": 32.08, + "MedrxivClusteringS2S": 30.84, + "RedditClustering": 62.24, + "RedditClusteringP2P": 63.7, + 
"StackExchangeClustering": 70.19, + "StackExchangeClusteringP2P": 36.1, + "TwentyNewsgroupsClustering": 50.6 } ] }, "PairClassification": { "ap": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "SprintDuplicateQuestions": 88.14, - "TwitterSemEval2015": 66.6, - "TwitterURLCorpus": 79.3 + "Model": "google-gecko-256.text-embedding-preview-0409", + "SprintDuplicateQuestions": 96.49, + "TwitterSemEval2015": 78.23, + "TwitterURLCorpus": 87.04 } ] }, "Reranking": { "map": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "AskUbuntuDupQuestions": 57.16, - "MindSmallReranking": 30.1, - "SciDocsRR": 76.28, - "StackOverflowDupQuestions": 48.82 + "Model": "google-gecko-256.text-embedding-preview-0409", + "AskUbuntuDupQuestions": 63.84, + "MindSmallReranking": 31.89, + "SciDocsRR": 81.62, + "StackOverflowDupQuestions": 53.76 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "ArguAna": 51.73, - "CQADupstackRetrieval": 32.4, - "ClimateFEVER": 23.58, - "DBPedia": 26.78, - "FEVER": 53.42, - "FiQA2018": 28.56, - "HotpotQA": 52.37, - "MSMARCO": 17.47, - "NFCorpus": 26.28, - "NQ": 37.65, - "QuoraRetrieval": 84.64, - "SCIDOCS": 10.39, - "SciFact": 66.36, - "TRECCOVID": 63.34, - "Touche2020": 12.82 + "Model": "google-gecko-256.text-embedding-preview-0409", + "ArguAna": 56.27, + "CQADupstackRetrieval": 45.41, + "ClimateFEVER": 29.35, + "DBPedia": 41.91, + "FEVER": 82.61, + "FiQA2018": 55.54, + "HotpotQA": 64.65, + "MSMARCO": 31.12, + "NFCorpus": 37.81, + "NQ": 57.37, + "QuoraRetrieval": 87.89, + "SCIDOCS": 18.21, + "SciFact": 70.86, + "TRECCOVID": 80.13, + "Touche2020": 27.4 } ] }, "STS": { "spearman": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "BIOSSES": 84.67, - "SICK-R": 72.16, - "STS12": 61.6, - "STS13": 79.71, - "STS14": 72.11, - "STS15": 82.18, - "STS16": 79.41, - "STS17 (en-en)": 85.44, - "STS22 (en)": 63.9, - "STSBenchmark": 77.44 + "Model": "google-gecko-256.text-embedding-preview-0409", + "BIOSSES": 89.42, + "SICK-R": 81.67, + "STS12": 78.02, + "STS13": 90.1, + "STS14": 85.44, + "STS15": 89.64, + "STS16": 87.24, + "STS17 (en-en)": 90.46, + "STS22 (en)": 67.99, + "STSBenchmark": 89.33 } ] }, "Summarization": { "spearman": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised", - "SummEval": 31.45 + "Model": "google-gecko-256.text-embedding-preview-0409", + "SummEval": 32.36 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "LLM2Vec-Meta-Llama-3-unsupervised" + "Model": "google-gecko-256.text-embedding-preview-0409" } ] } }, - "bge-m3-instruct": { + "nb-bert-large": { "BitextMining": { "f1": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large", + "BornholmBitextMining": 4.53 } ] }, "Classification": { "accuracy": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large", + "AngryTweetsClassification": 52.14, + "DKHateClassification": 62.13, + "DanishPoliticalCommentsClassification": 35.04, + "LccSentimentClassification": 56.27, + "MassiveIntentClassification (da)": 57.03, + "MassiveIntentClassification (nb)": 62.68, + "MassiveIntentClassification (sv)": 55.02, + "MassiveScenarioClassification (da)": 60.43, + "MassiveScenarioClassification (nb)": 67.44, + "MassiveScenarioClassification (sv)": 57.12, + "NoRecClassification": 55.46, + "NordicLangClassification": 85.27, + "NorwegianParliament": 62.58, + "ScalaDaClassification": 62.85, + "ScalaNbClassification": 66.97 } ] }, "Clustering": { "v_measure": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large" } ] }, "PairClassification": { "ap": [ { - "Model": "bge-m3-instruct" + 
"Model": "nb-bert-large" } ] }, "Reranking": { "map": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bge-m3-instruct", - "ARCChallenge": 9.03, - "AlphaNLI": 24.69, - "HellaSwag": 25.55, - "PIQA": 19.03, - "Quail": 7.08, - "RARbCode": 39.58, - "RARbMath": 64.51, - "SIQA": 4.77, - "SpartQA": 7.0, - "TempReasonL1": 0.8, - "TempReasonL2Fact": 34.99, - "TempReasonL2Pure": 0.62, - "TempReasonL3Fact": 32.47, - "TempReasonL3Pure": 7.01, - "WinoGrande": 35.33 + "Model": "nb-bert-large" } ] }, "STS": { "spearman": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large" } ] }, "Summarization": { "spearman": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bge-m3-instruct" + "Model": "nb-bert-large" } ] } }, - "multi-qa-MiniLM-L6-cos-v1": { + "LLM2Vec-Llama-2-supervised": { "BitextMining": { "f1": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1" + "Model": "LLM2Vec-Llama-2-supervised" } ] }, "Classification": { "accuracy": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "AmazonReviewsClassification (fr)": 27.05, - "MTOPDomainClassification (fr)": 72.97, - "MTOPIntentClassification (fr)": 37.18, - "MasakhaNEWSClassification (fra)": 75.62, - "MassiveIntentClassification (fr)": 42.64, - "MassiveScenarioClassification (fr)": 49.92 - } - ] - }, - "Clustering": { - "v_measure": [ - { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "AlloProfClusteringP2P": 49.13, - "AlloProfClusteringS2S": 26.16, - "HALClusteringS2S": 12.49, - "MLSUMClusteringP2P": 35.15, - "MLSUMClusteringS2S": 25.95, - "MasakhaNEWSClusteringP2P (fra)": 53.73, - "MasakhaNEWSClusteringS2S (fra)": 27.27 + "Model": "LLM2Vec-Llama-2-supervised", + "AmazonCounterfactualClassification (en)": 82.22, + "AmazonPolarityClassification": 89.69, + "AmazonReviewsClassification (en)": 48.47, + "Banking77Classification": 88.17, + "EmotionClassification": 51.71, + "ImdbClassification": 85.78, + "MTOPDomainClassification (en)": 95.57, + "MTOPIntentClassification (en)": 82.81, + "MassiveIntentClassification (en)": 78.06, + "MassiveScenarioClassification (en)": 81.35, + "ToxicConversationsClassification": 71.01, + "TweetSentimentExtractionClassification": 61.11 + } + ] + }, + "Clustering": { + "v_measure": [ + { + "Model": "LLM2Vec-Llama-2-supervised", + "ArxivClusteringP2P": 43.14, + "ArxivClusteringS2S": 42.38, + "BiorxivClusteringP2P": 35.88, + "BiorxivClusteringS2S": 34.81, + "MedrxivClusteringP2P": 32.23, + "MedrxivClusteringS2S": 31.37, + "RedditClustering": 61.1, + "RedditClusteringP2P": 64.52, + "StackExchangeClustering": 67.98, + "StackExchangeClusteringP2P": 33.2, + "TwentyNewsgroupsClustering": 51.04 } ] }, "PairClassification": { "ap": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "OpusparcusPC (fr)": 88.07, - "PawsXPairClassification (fr)": 57.36 + "Model": "LLM2Vec-Llama-2-supervised", + "SprintDuplicateQuestions": 96.83, + "TwitterSemEval2015": 80.7, + "TwitterURLCorpus": 86.56 } ] }, "Reranking": { "map": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "AlloprofReranking": 40.28, - "SyntecReranking": 65.08 + "Model": "LLM2Vec-Llama-2-supervised", + "AskUbuntuDupQuestions": 63.13, + "MindSmallReranking": 31.34, + "SciDocsRR": 84.03, + "StackOverflowDupQuestions": 51.02 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "AlloprofRetrieval": 30.23, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 16.31, - "SyntecRetrieval": 58.07, - "XPQARetrieval (fr)": 48.83 + "Model": 
"LLM2Vec-Llama-2-supervised", + "ArguAna": 56.53, + "CQADupstackRetrieval": 45.94, + "ClimateFEVER": 30.7, + "DBPedia": 48.42, + "FEVER": 89.93, + "FiQA2018": 51.28, + "HotpotQA": 72.99, + "MSMARCO": 41.46, + "NFCorpus": 40.33, + "NQ": 61.24, + "QuoraRetrieval": 85.59, + "SCIDOCS": 21.05, + "SciFact": 77.3, + "TRECCOVID": 79.25, + "Touche2020": 16.92 } ] }, "STS": { "spearman": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "SICKFr": 62.11, - "STS22 (fr)": 74.62, - "STSBenchmarkMultilingualSTS (fr)": 63.85 + "Model": "LLM2Vec-Llama-2-supervised", + "BIOSSES": 82.13, + "SICK-R": 83.01, + "STS12": 78.85, + "STS13": 86.84, + "STS14": 84.04, + "STS15": 88.72, + "STS16": 86.79, + "STS17 (en-en)": 90.63, + "STS22 (en)": 67.55, + "STSBenchmark": 88.72 } ] }, "Summarization": { "spearman": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1", - "SummEvalFr": 27.59 + "Model": "LLM2Vec-Llama-2-supervised", + "SummEval": 28.49 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "multi-qa-MiniLM-L6-cos-v1" + "Model": "LLM2Vec-Llama-2-supervised" } ] } }, - "titan-embed-text-v1": { + "sbert_large_mt_nlu_ru": { "BitextMining": { "f1": [ { - "Model": "titan-embed-text-v1" + "Model": "sbert_large_mt_nlu_ru" } ] }, "Classification": { "accuracy": [ { - "Model": "titan-embed-text-v1", - "AmazonCounterfactualClassification (en)": 61.85, - "Banking77Classification": 83.21 + "Model": "sbert_large_mt_nlu_ru", + "GeoreviewClassification (rus-Cyrl)": 39.67, + "HeadlineClassification (rus-Cyrl)": 77.19, + "InappropriatenessClassification (rus-Cyrl)": 64.64, + "KinopoiskClassification (rus-Cyrl)": 50.33, + "MassiveIntentClassification (rus-Cyrl)": 61.42, + "MassiveScenarioClassification (rus-Cyrl)": 68.13, + "RuReviewsClassification (rus-Cyrl)": 58.29, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.19, + "RuSciBenchOECDClassification (rus-Cyrl)": 43.8 } ] }, "Clustering": { "v_measure": [ { - "Model": "titan-embed-text-v1" + "Model": "sbert_large_mt_nlu_ru", + "GeoreviewClusteringP2P (rus-Cyrl)": 58.45, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.2, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 47.29 } ] }, "PairClassification": { "ap": [ { - "Model": "titan-embed-text-v1" + "Model": "sbert_large_mt_nlu_ru", + "TERRa (rus-Cyrl)": 51.97 } ] }, "Reranking": { "map": [ { - "Model": "titan-embed-text-v1", - "SciDocsRR": 88.87 + "Model": "sbert_large_mt_nlu_ru", + "RuBQReranking (rus-Cyrl)": 56.13 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "titan-embed-text-v1", - "ArguAna": 48.83, - "FiQA2018": 40.38, - "MSMARCO": 35.19, - "NQ": 51.08, - "SciFact": 73.5, - "TRECCOVID": 54.74 + "Model": "sbert_large_mt_nlu_ru", + "RiaNewsRetrieval (rus-Cyrl)": 21.4, + "RuBQRetrieval (rus-Cyrl)": 29.8 } ] }, "STS": { "spearman": [ { - "Model": "titan-embed-text-v1", - "BIOSSES": 84.17, - "SICK-R": 73.05, - "STS12": 66.59, - "STS13": 83.24, - "STS14": 73.71, - "STS15": 82.4, - "STS16": NaN, - "STS17 (en-en)": 80.9, - "STSBenchmark": 74.85 + "Model": "sbert_large_mt_nlu_ru", + "RUParaPhraserSTS (rus-Cyrl)": 65.17, + "RuSTSBenchmarkSTS (rus-Cyrl)": 71.22, + "STS22 (rus-Cyrl)": 56.82 } ] }, "Summarization": { "spearman": [ { - "Model": "titan-embed-text-v1" + "Model": "sbert_large_mt_nlu_ru" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "titan-embed-text-v1" + "Model": "sbert_large_mt_nlu_ru" } ] } }, - "udever-bloom-560m": { + "bge-m3-instruct": { "BitextMining": { "f1": [ { - "Model": "udever-bloom-560m" + "Model": "bge-m3-instruct" } ] }, "Classification": { "accuracy": [ { - "Model": "udever-bloom-560m", - 
"AmazonReviewsClassification (fr)": 26.85, - "MTOPDomainClassification (fr)": 34.99, - "MTOPIntentClassification (fr)": 15.76, - "MasakhaNEWSClassification (fra)": 67.94, - "MassiveIntentClassification (fr)": 15.09, - "MassiveScenarioClassification (fr)": 21.67 + "Model": "bge-m3-instruct" } ] }, "Clustering": { "v_measure": [ { - "Model": "udever-bloom-560m", - "AlloProfClusteringP2P": 53.57, - "AlloProfClusteringS2S": 22.13, - "HALClusteringS2S": 7.68, - "MLSUMClusteringP2P": 36.43, - "MLSUMClusteringS2S": 25.26, - "MasakhaNEWSClusteringP2P (fra)": 37.57, - "MasakhaNEWSClusteringS2S (fra)": 20.58 + "Model": "bge-m3-instruct" } ] }, "PairClassification": { "ap": [ { - "Model": "udever-bloom-560m", - "OpusparcusPC (fr)": 82.1, - "PawsXPairClassification (fr)": 59.69 + "Model": "bge-m3-instruct" } ] }, "Reranking": { "map": [ { - "Model": "udever-bloom-560m", - "AlloprofReranking": 28.75, - "SyntecReranking": 50.88 + "Model": "bge-m3-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "udever-bloom-560m", - "AlloprofRetrieval": 1.98, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 0.48, - "SyntecRetrieval": 24.45, - "XPQARetrieval (fr)": 12.98 + "Model": "bge-m3-instruct", + "ARCChallenge": 9.03, + "AlphaNLI": 24.69, + "HellaSwag": 25.55, + "PIQA": 19.03, + "Quail": 7.08, + "RARbCode": 39.58, + "RARbMath": 64.51, + "SIQA": 4.77, + "SpartQA": 7.0, + "TempReasonL1": 0.8, + "TempReasonL2Fact": 34.99, + "TempReasonL2Pure": 0.62, + "TempReasonL3Fact": 32.47, + "TempReasonL3Pure": 7.01, + "WinoGrande": 35.33 } ] }, "STS": { "spearman": [ { - "Model": "udever-bloom-560m", - "SICKFr": 54.54, - "STS22 (fr)": 61.35, - "STSBenchmarkMultilingualSTS (fr)": 36.78 + "Model": "bge-m3-instruct" } ] }, "Summarization": { "spearman": [ { - "Model": "udever-bloom-560m", - "SummEvalFr": 23.63 + "Model": "bge-m3-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "udever-bloom-560m" + "Model": "bge-m3-instruct" } ] } }, - "LLM2Vec-Llama-2-supervised": { + "text2vec-large-chinese": { "BitextMining": { "f1": [ { - "Model": "LLM2Vec-Llama-2-supervised" + "Model": "text2vec-large-chinese" } ] }, "Classification": { "accuracy": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "AmazonCounterfactualClassification (en)": 82.22, - "AmazonPolarityClassification": 89.69, - "AmazonReviewsClassification (en)": 48.47, - "Banking77Classification": 88.17, - "EmotionClassification": 51.71, - "ImdbClassification": 85.78, - "MTOPDomainClassification (en)": 95.57, - "MTOPIntentClassification (en)": 82.81, - "MassiveIntentClassification (en)": 78.06, - "MassiveScenarioClassification (en)": 81.35, - "ToxicConversationsClassification": 71.01, - "TweetSentimentExtractionClassification": 61.11 + "Model": "text2vec-large-chinese", + "AmazonReviewsClassification (zh)": 33.77, + "IFlyTek": 41.54, + "JDReview": 81.56, + "MassiveIntentClassification (zh-CN)": 63.23, + "MassiveScenarioClassification (zh-CN)": 68.45, + "MultilingualSentiment": 58.97, + "OnlineShopping": 83.51, + "TNews": 38.92, + "Waimai": 76.01 } ] }, "Clustering": { "v_measure": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "ArxivClusteringP2P": 43.14, - "ArxivClusteringS2S": 42.38, - "BiorxivClusteringP2P": 35.88, - "BiorxivClusteringS2S": 34.81, - "MedrxivClusteringP2P": 32.23, - "MedrxivClusteringS2S": 31.37, - "RedditClustering": 61.1, - "RedditClusteringP2P": 64.52, - "StackExchangeClustering": 67.98, - "StackExchangeClusteringP2P": 33.2, - "TwentyNewsgroupsClustering": 51.04 + "Model": "text2vec-large-chinese", + "CLSClusteringP2P": 30.13, + 
"CLSClusteringS2S": 28.77, + "ThuNewsClusteringP2P": 35.05, + "ThuNewsClusteringS2S": 26.14 } ] }, "PairClassification": { "ap": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "SprintDuplicateQuestions": 96.83, - "TwitterSemEval2015": 80.7, - "TwitterURLCorpus": 86.56 + "Model": "text2vec-large-chinese", + "Cmnli": 77.67, + "Ocnli": 64.04 } ] }, "Reranking": { "map": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "AskUbuntuDupQuestions": 63.13, - "MindSmallReranking": 31.34, - "SciDocsRR": 84.03, - "StackOverflowDupQuestions": 51.02 + "Model": "text2vec-large-chinese", + "CMedQAv1": 58.92, + "CMedQAv2": 60.41, + "MMarcoReranking": 12.48, + "T2Reranking": 64.82 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "ArguAna": 56.53, - "CQADupstackRetrieval": 45.94, - "ClimateFEVER": 30.7, - "DBPedia": 48.42, - "FEVER": 89.93, - "FiQA2018": 51.28, - "HotpotQA": 72.99, - "MSMARCO": 41.46, - "NFCorpus": 40.33, - "NQ": 61.24, - "QuoraRetrieval": 85.59, - "SCIDOCS": 21.05, - "SciFact": 77.3, - "TRECCOVID": 79.25, - "Touche2020": 16.92 + "Model": "text2vec-large-chinese", + "CmedqaRetrieval": 15.53, + "CovidRetrieval": 60.48, + "DuRetrieval": 51.87, + "EcomRetrieval": 37.58, + "MMarcoRetrieval": 45.96, + "MedicalRetrieval": 30.93, + "T2Retrieval": 50.52, + "VideoRetrieval": 42.65 } ] }, "STS": { "spearman": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "BIOSSES": 82.13, - "SICK-R": 83.01, - "STS12": 78.85, - "STS13": 86.84, - "STS14": 84.04, - "STS15": 88.72, - "STS16": 86.79, - "STS17 (en-en)": 90.63, - "STS22 (en)": 67.55, - "STSBenchmark": 88.72 + "Model": "text2vec-large-chinese", + "AFQMC": 24.51, + "ATEC": 32.45, + "BQ": 44.22, + "LCQMC": 69.16, + "PAWSX": 14.55, + "QBQTC": 29.51, + "STS22 (zh)": 65.94, + "STSB": 79.45 } ] }, "Summarization": { "spearman": [ { - "Model": "LLM2Vec-Llama-2-supervised", - "SummEval": 28.49 + "Model": "text2vec-large-chinese" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "LLM2Vec-Llama-2-supervised" + "Model": "text2vec-large-chinese" } ] } }, - "bert-base-25lang-cased": { + "gtr-t5-large": { "BitextMining": { "f1": [ { - "Model": "bert-base-25lang-cased" + "Model": "gtr-t5-large" } ] }, "Classification": { "accuracy": [ { - "Model": "bert-base-25lang-cased", - "AmazonReviewsClassification (fr)": 29.39, - "MTOPDomainClassification (fr)": 63.63, - "MTOPIntentClassification (fr)": 37.86, - "MasakhaNEWSClassification (fra)": 63.91, - "MassiveIntentClassification (fr)": 37.3, - "MassiveScenarioClassification (fr)": 44.47 + "Model": "gtr-t5-large", + "AmazonCounterfactualClassification (de)": 59.38, + "AmazonCounterfactualClassification (en)": 70.03, + "AmazonCounterfactualClassification (en-ext)": 69.86, + "AmazonCounterfactualClassification (ja)": 45.87, + "AmazonPolarityClassification": 73.92, + "AmazonReviewsClassification (de)": 33.06, + "AmazonReviewsClassification (en)": 37.21, + "AmazonReviewsClassification (es)": 34.0, + "AmazonReviewsClassification (fr)": 33.48, + "AmazonReviewsClassification (ja)": 21.78, + "AmazonReviewsClassification (zh)": 21.83, + "Banking77Classification": 81.21, + "EmotionClassification": 46.33, + "ImdbClassification": 70.86, + "MTOPDomainClassification (de)": 81.91, + "MTOPDomainClassification (en)": 94.01, + "MTOPDomainClassification (es)": 84.7, + "MTOPDomainClassification (fr)": 82.48, + "MTOPDomainClassification (hi)": 22.11, + "MTOPDomainClassification (th)": 16.36, + "MTOPIntentClassification (de)": 52.13, + "MTOPIntentClassification (en)": 63.86, + "MTOPIntentClassification (es)": 
52.62, + "MTOPIntentClassification (fr)": 46.39, + "MTOPIntentClassification (hi)": 3.9, + "MTOPIntentClassification (th)": 5.38, + "MassiveIntentClassification (af)": 41.02, + "MassiveIntentClassification (am)": 2.34, + "MassiveIntentClassification (ar)": 4.87, + "MassiveIntentClassification (az)": 34.92, + "MassiveIntentClassification (bn)": 2.52, + "MassiveIntentClassification (cy)": 35.87, + "MassiveIntentClassification (da)": 45.3, + "MassiveIntentClassification (de)": 51.48, + "MassiveIntentClassification (el)": 10.0, + "MassiveIntentClassification (en)": 70.06, + "MassiveIntentClassification (es)": 53.3, + "MassiveIntentClassification (fa)": 3.59, + "MassiveIntentClassification (fi)": 37.35, + "MassiveIntentClassification (fr)": 54.83, + "MassiveIntentClassification (he)": 2.52, + "MassiveIntentClassification (hi)": 2.88, + "MassiveIntentClassification (hu)": 33.52, + "MassiveIntentClassification (hy)": 3.13, + "MassiveIntentClassification (id)": 40.11, + "MassiveIntentClassification (is)": 34.77, + "MassiveIntentClassification (it)": 51.21, + "MassiveIntentClassification (ja)": 4.75, + "MassiveIntentClassification (jv)": 35.6, + "MassiveIntentClassification (ka)": 2.71, + "MassiveIntentClassification (km)": 5.48, + "MassiveIntentClassification (kn)": 2.44, + "MassiveIntentClassification (ko)": 2.59, + "MassiveIntentClassification (lv)": 38.15, + "MassiveIntentClassification (ml)": 2.67, + "MassiveIntentClassification (mn)": 18.47, + "MassiveIntentClassification (ms)": 35.58, + "MassiveIntentClassification (my)": 4.35, + "MassiveIntentClassification (nb)": 43.78, + "MassiveIntentClassification (nl)": 45.96, + "MassiveIntentClassification (pl)": 39.08, + "MassiveIntentClassification (pt)": 52.27, + "MassiveIntentClassification (ro)": 46.39, + "MassiveIntentClassification (ru)": 16.82, + "MassiveIntentClassification (sl)": 37.3, + "MassiveIntentClassification (sq)": 41.73, + "MassiveIntentClassification (sv)": 43.51, + "MassiveIntentClassification (sw)": 35.97, + "MassiveIntentClassification (ta)": 1.52, + "MassiveIntentClassification (te)": 2.57, + "MassiveIntentClassification (th)": 3.94, + "MassiveIntentClassification (tl)": 41.03, + "MassiveIntentClassification (tr)": 33.75, + "MassiveIntentClassification (ur)": 2.57, + "MassiveIntentClassification (vi)": 25.23, + "MassiveIntentClassification (zh-CN)": 2.41, + "MassiveIntentClassification (zh-TW)": 4.64, + "MassiveScenarioClassification (af)": 51.48, + "MassiveScenarioClassification (am)": 7.74, + "MassiveScenarioClassification (ar)": 12.03, + "MassiveScenarioClassification (az)": 41.77, + "MassiveScenarioClassification (bn)": 8.07, + "MassiveScenarioClassification (cy)": 43.67, + "MassiveScenarioClassification (da)": 54.88, + "MassiveScenarioClassification (de)": 63.63, + "MassiveScenarioClassification (el)": 16.83, + "MassiveScenarioClassification (en)": 75.49, + "MassiveScenarioClassification (es)": 61.48, + "MassiveScenarioClassification (fa)": 6.48, + "MassiveScenarioClassification (fi)": 43.54, + "MassiveScenarioClassification (fr)": 64.06, + "MassiveScenarioClassification (he)": 8.03, + "MassiveScenarioClassification (hi)": 7.5, + "MassiveScenarioClassification (hu)": 42.59, + "MassiveScenarioClassification (hy)": 9.22, + "MassiveScenarioClassification (id)": 48.67, + "MassiveScenarioClassification (is)": 43.87, + "MassiveScenarioClassification (it)": 59.83, + "MassiveScenarioClassification (ja)": 5.62, + "MassiveScenarioClassification (jv)": 42.18, + "MassiveScenarioClassification (ka)": 7.52, + 
"MassiveScenarioClassification (km)": 9.55, + "MassiveScenarioClassification (kn)": 8.34, + "MassiveScenarioClassification (ko)": 6.11, + "MassiveScenarioClassification (lv)": 43.35, + "MassiveScenarioClassification (ml)": 7.28, + "MassiveScenarioClassification (mn)": 23.94, + "MassiveScenarioClassification (ms)": 45.18, + "MassiveScenarioClassification (my)": 9.33, + "MassiveScenarioClassification (nb)": 52.71, + "MassiveScenarioClassification (nl)": 57.02, + "MassiveScenarioClassification (pl)": 46.79, + "MassiveScenarioClassification (pt)": 59.45, + "MassiveScenarioClassification (ro)": 56.8, + "MassiveScenarioClassification (ru)": 25.85, + "MassiveScenarioClassification (sl)": 42.51, + "MassiveScenarioClassification (sq)": 50.41, + "MassiveScenarioClassification (sv)": 54.16, + "MassiveScenarioClassification (sw)": 43.02, + "MassiveScenarioClassification (ta)": 7.21, + "MassiveScenarioClassification (te)": 6.9, + "MassiveScenarioClassification (th)": 8.7, + "MassiveScenarioClassification (tl)": 51.76, + "MassiveScenarioClassification (tr)": 42.54, + "MassiveScenarioClassification (ur)": 9.32, + "MassiveScenarioClassification (vi)": 31.51, + "MassiveScenarioClassification (zh-CN)": 3.84, + "MassiveScenarioClassification (zh-TW)": 8.16, + "ToxicConversationsClassification": 68.65, + "TweetSentimentExtractionClassification": 54.09 } ] }, "Clustering": { "v_measure": [ { - "Model": "bert-base-25lang-cased", - "AlloProfClusteringP2P": 53.49, - "AlloProfClusteringS2S": 43.1, - "HALClusteringS2S": 19.78, - "MLSUMClusteringP2P": 40.73, - "MLSUMClusteringS2S": 31.94, - "MasakhaNEWSClusteringP2P (fra)": 24.23, - "MasakhaNEWSClusteringS2S (fra)": 24.46 + "Model": "gtr-t5-large", + "ArxivClusteringP2P": 37.5, + "ArxivClusteringS2S": 30.55, + "BiorxivClusteringP2P": 29.59, + "BiorxivClusteringS2S": 25.72, + "MedrxivClusteringP2P": 28.72, + "MedrxivClusteringS2S": 27.39, + "RedditClustering": 61.69, + "RedditClusteringP2P": 61.67, + "StackExchangeClustering": 69.93, + "StackExchangeClusteringP2P": 33.21, + "TwentyNewsgroupsClustering": 51.64 } ] }, "PairClassification": { "ap": [ { - "Model": "bert-base-25lang-cased", - "OpusparcusPC (fr)": 86.79, - "PawsXPairClassification (fr)": 53.39 + "Model": "gtr-t5-large", + "SprintDuplicateQuestions": 95.05, + "TwitterSemEval2015": 76.03, + "TwitterURLCorpus": 84.89 } ] }, "Reranking": { "map": [ { - "Model": "bert-base-25lang-cased", - "AlloprofReranking": 36.25, - "SyntecReranking": 53.25 + "Model": "gtr-t5-large", + "AskUbuntuDupQuestions": 61.64, + "MindSmallReranking": 31.84, + "SciDocsRR": 76.39, + "StackOverflowDupQuestions": 51.58 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bert-base-25lang-cased", - "AlloprofRetrieval": 1.6, - "BSARDRetrieval": 0.0, - "MintakaRetrieval (fr)": 3.55, - "SyntecRetrieval": 18.95, - "XPQARetrieval (fr)": 18.46 + "Model": "gtr-t5-large", + "ArguAna": 52.09, + "CQADupstackRetrieval": 36.62, + "ClimateFEVER": 26.9, + "DBPedia": 39.55, + "FEVER": 72.66, + "FiQA2018": 42.79, + "HotpotQA": 57.85, + "MSMARCO": 42.73, + "NFCorpus": 32.63, + "NQ": 55.09, + "QuoraRetrieval": 88.47, + "SCIDOCS": 15.51, + "SciFact": 63.42, + "TRECCOVID": 56.68, + "Touche2020": 28.29 } ] }, - "STS": { - "spearman": [ - { - "Model": "bert-base-25lang-cased", - "SICKFr": 58.76, - "STS22 (fr)": 38.77, - "STSBenchmarkMultilingualSTS (fr)": 52.25 + "STS": { + "spearman": [ + { + "Model": "gtr-t5-large", + "BIOSSES": 84.86, + "SICK-R": 73.39, + "STS12": 70.33, + "STS13": 82.19, + "STS14": 77.16, + "STS15": 86.31, + "STS16": 81.85, + "STS17 
(ar-ar)": 10.19, + "STS17 (en-ar)": -5.77, + "STS17 (en-de)": 67.43, + "STS17 (en-en)": 83.93, + "STS17 (en-tr)": 8.75, + "STS17 (es-en)": 54.96, + "STS17 (es-es)": 82.74, + "STS17 (fr-en)": 60.5, + "STS17 (it-en)": 46.26, + "STS17 (ko-ko)": 8.96, + "STS17 (nl-en)": 47.48, + "STS22 (ar)": 34.97, + "STS22 (de)": 51.7, + "STS22 (de-en)": 48.76, + "STS22 (de-fr)": 57.5, + "STS22 (de-pl)": 32.76, + "STS22 (en)": 64.3, + "STS22 (es)": 57.49, + "STS22 (es-en)": 67.76, + "STS22 (es-it)": 57.18, + "STS22 (fr)": 78.7, + "STS22 (fr-pl)": 61.98, + "STS22 (it)": 67.67, + "STS22 (pl)": 30.68, + "STS22 (pl-en)": 54.17, + "STS22 (ru)": 15.36, + "STS22 (tr)": 58.12, + "STS22 (zh)": 27.32, + "STS22 (zh-en)": 29.42, + "STSBenchmark": 77.6 } ] }, "Summarization": { "spearman": [ { - "Model": "bert-base-25lang-cased", - "SummEvalFr": 28.84 + "Model": "gtr-t5-large", + "SummEval": 29.5 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bert-base-25lang-cased" + "Model": "gtr-t5-large" } ] } }, - "flan-t5-base": { + "OpenSearch-text-hybrid": { "BitextMining": { "f1": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid" } ] }, "Classification": { "accuracy": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid", + "AmazonReviewsClassification (zh)": 46.18, + "IFlyTek": 51.8, + "JDReview": 86.02, + "MassiveIntentClassification (zh-CN)": 73.85, + "MassiveScenarioClassification (zh-CN)": 77.13, + "MultilingualSentiment": 76.35, + "OnlineShopping": 93.2, + "TNews": 53.06, + "Waimai": 88.1 } ] }, "Clustering": { "v_measure": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid", + "CLSClusteringP2P": 41.64, + "CLSClusteringS2S": 40.33, + "ThuNewsClusteringP2P": 69.28, + "ThuNewsClusteringS2S": 63.75 } ] }, "PairClassification": { "ap": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid", + "Cmnli": 90.77, + "Ocnli": 85.44 } ] }, "Reranking": { "map": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid", + "CMedQAv1": 88.99, + "CMedQAv2": 89.6, + "MMarcoReranking": 28.12, + "T2Reranking": 66.38 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid", + "CmedqaRetrieval": 46.56, + "CovidRetrieval": 84.03, + "DuRetrieval": 87.85, + "EcomRetrieval": 68.79, + "MMarcoRetrieval": 79.93, + "MedicalRetrieval": 65.92, + "T2Retrieval": 86.76, + "VideoRetrieval": 75.43 } ] }, "STS": { "spearman": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid", + "AFQMC": 59.11, + "ATEC": 58.19, + "BQ": 71.07, + "LCQMC": 78.27, + "PAWSX": 44.98, + "QBQTC": 38.69, + "STS22 (zh)": 66.53, + "STSB": 82.8 } ] }, "Summarization": { "spearman": [ { - "Model": "flan-t5-base" + "Model": "OpenSearch-text-hybrid" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "flan-t5-base", - "Core17InstructionRetrieval": -3.31, - "News21InstructionRetrieval": -0.12, - "Robust04InstructionRetrieval": 5.35 + "Model": "OpenSearch-text-hybrid" } ] } }, - "sbert_large_mt_nlu_ru": { + "voyage-lite-01-instruct": { "BitextMining": { "f1": [ { - "Model": "sbert_large_mt_nlu_ru" + "Model": "voyage-lite-01-instruct" } ] }, "Classification": { "accuracy": [ { - "Model": "sbert_large_mt_nlu_ru", - "GeoreviewClassification (rus-Cyrl)": 39.67, - "HeadlineClassification (rus-Cyrl)": 77.19, - "InappropriatenessClassification (rus-Cyrl)": 64.64, - "KinopoiskClassification (rus-Cyrl)": 50.33, - "MassiveIntentClassification (rus-Cyrl)": 61.42, - "MassiveScenarioClassification (rus-Cyrl)": 68.13, - "RuReviewsClassification (rus-Cyrl)": 58.29, - 
"RuSciBenchGRNTIClassification (rus-Cyrl)": 54.19, - "RuSciBenchOECDClassification (rus-Cyrl)": 43.8 + "Model": "voyage-lite-01-instruct", + "AmazonCounterfactualClassification (en)": 71.43, + "AmazonPolarityClassification": 96.41, + "AmazonReviewsClassification (en)": 57.06, + "Banking77Classification": 81.64, + "EmotionClassification": 48.29, + "ImdbClassification": 95.49, + "MTOPDomainClassification (en)": 96.3, + "MTOPIntentClassification (en)": 67.93, + "MassiveIntentClassification (en)": 71.29, + "MassiveScenarioClassification (en)": 76.74, + "ToxicConversationsClassification": 75.45, + "TweetSentimentExtractionClassification": 59.44 } ] }, "Clustering": { "v_measure": [ { - "Model": "sbert_large_mt_nlu_ru", - "GeoreviewClusteringP2P (rus-Cyrl)": 58.45, - "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.2, - "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 47.29 + "Model": "voyage-lite-01-instruct", + "ArxivClusteringP2P": 47.92, + "ArxivClusteringS2S": 42.42, + "BiorxivClusteringP2P": 38.72, + "BiorxivClusteringS2S": 36.6, + "MedrxivClusteringP2P": 34.04, + "MedrxivClusteringS2S": 32.81, + "RedditClustering": 61.56, + "RedditClusteringP2P": 65.35, + "StackExchangeClustering": 70.16, + "StackExchangeClusteringP2P": 38.23, + "TwentyNewsgroupsClustering": 53.56 } ] }, "PairClassification": { "ap": [ { - "Model": "sbert_large_mt_nlu_ru", - "TERRa (rus-Cyrl)": 51.97 + "Model": "voyage-lite-01-instruct", + "SprintDuplicateQuestions": 96.01, + "TwitterSemEval2015": 76.87, + "TwitterURLCorpus": 86.84 } ] }, "Reranking": { "map": [ { - "Model": "sbert_large_mt_nlu_ru", - "RuBQReranking (rus-Cyrl)": 56.13 + "Model": "voyage-lite-01-instruct", + "AskUbuntuDupQuestions": 65.77, + "MindSmallReranking": 31.69, + "SciDocsRR": 87.03, + "StackOverflowDupQuestions": 54.49 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "sbert_large_mt_nlu_ru", - "RiaNewsRetrieval (rus-Cyrl)": 21.4, - "RuBQRetrieval (rus-Cyrl)": 29.8 + "Model": "voyage-lite-01-instruct", + "ArguAna": 58.73, + "CQADupstackRetrieval": 45.11, + "ClimateFEVER": 37.47, + "DBPedia": 43.42, + "FEVER": 89.71, + "FiQA2018": 44.79, + "HotpotQA": 70.46, + "MSMARCO": 39.66, + "NFCorpus": 43.33, + "NQ": 60.65, + "QuoraRetrieval": 87.83, + "SCIDOCS": 23.19, + "SciFact": 73.64, + "TRECCOVID": 78.92, + "Touche2020": 36.83 } ] }, "STS": { "spearman": [ { - "Model": "sbert_large_mt_nlu_ru", - "RUParaPhraserSTS (rus-Cyrl)": 65.17, - "RuSTSBenchmarkSTS (rus-Cyrl)": 71.22, - "STS22 (rus-Cyrl)": 56.82 + "Model": "voyage-lite-01-instruct", + "BIOSSES": 84.85, + "SICK-R": 79.71, + "STS12": 77.09, + "STS13": 88.91, + "STS14": 82.08, + "STS15": 89.21, + "STS16": 84.74, + "STS17 (en-en)": 90.73, + "STS22 (en)": 62.1, + "STSBenchmark": 89.86 } ] }, "Summarization": { "spearman": [ { - "Model": "sbert_large_mt_nlu_ru" + "Model": "voyage-lite-01-instruct", + "SummEval": 30.97 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "sbert_large_mt_nlu_ru" + "Model": "voyage-lite-01-instruct" } ] } }, - "monobert-large-msmarco": { + "allenai-specter": { "BitextMining": { "f1": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter" } ] }, "Classification": { "accuracy": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "AmazonCounterfactualClassification (de)": 54.46, + "AmazonCounterfactualClassification (en)": 58.7, + "AmazonCounterfactualClassification (en-ext)": 59.28, + "AmazonCounterfactualClassification (ja)": 43.87, + "AmazonPolarityClassification": 57.77, + "AmazonReviewsClassification (de)": 24.08, + 
"AmazonReviewsClassification (en)": 26.26, + "AmazonReviewsClassification (es)": 23.88, + "AmazonReviewsClassification (fr)": 23.31, + "AmazonReviewsClassification (ja)": 20.25, + "AmazonReviewsClassification (zh)": 20.49, + "Banking77Classification": 66.66, + "EmotionClassification": 24.82, + "ImdbClassification": 56.35, + "MTOPDomainClassification (de)": 48.55, + "MTOPDomainClassification (en)": 74.53, + "MTOPDomainClassification (es)": 58.39, + "MTOPDomainClassification (fr)": 54.61, + "MTOPDomainClassification (hi)": 21.22, + "MTOPDomainClassification (th)": 14.98, + "MTOPIntentClassification (de)": 35.55, + "MTOPIntentClassification (en)": 50.05, + "MTOPIntentClassification (es)": 36.72, + "MTOPIntentClassification (fr)": 34.71, + "MTOPIntentClassification (hi)": 4.44, + "MTOPIntentClassification (th)": 4.67, + "MassiveIntentClassification (af)": 33.68, + "MassiveIntentClassification (am)": 2.94, + "MassiveIntentClassification (ar)": 10.04, + "MassiveIntentClassification (az)": 30.74, + "MassiveIntentClassification (bn)": 3.02, + "MassiveIntentClassification (cy)": 33.94, + "MassiveIntentClassification (da)": 38.47, + "MassiveIntentClassification (de)": 36.06, + "MassiveIntentClassification (el)": 27.7, + "MassiveIntentClassification (en)": 51.73, + "MassiveIntentClassification (es)": 35.6, + "MassiveIntentClassification (fa)": 17.97, + "MassiveIntentClassification (fi)": 35.53, + "MassiveIntentClassification (fr)": 38.41, + "MassiveIntentClassification (he)": 2.69, + "MassiveIntentClassification (hi)": 3.43, + "MassiveIntentClassification (hu)": 34.05, + "MassiveIntentClassification (hy)": 3.11, + "MassiveIntentClassification (id)": 40.02, + "MassiveIntentClassification (is)": 32.63, + "MassiveIntentClassification (it)": 39.28, + "MassiveIntentClassification (ja)": 4.95, + "MassiveIntentClassification (jv)": 34.95, + "MassiveIntentClassification (ka)": 2.57, + "MassiveIntentClassification (km)": 4.73, + "MassiveIntentClassification (kn)": 3.54, + "MassiveIntentClassification (ko)": 2.68, + "MassiveIntentClassification (lv)": 37.91, + "MassiveIntentClassification (ml)": 2.88, + "MassiveIntentClassification (mn)": 16.94, + "MassiveIntentClassification (ms)": 36.6, + "MassiveIntentClassification (my)": 3.96, + "MassiveIntentClassification (nb)": 34.75, + "MassiveIntentClassification (nl)": 33.95, + "MassiveIntentClassification (pl)": 35.77, + "MassiveIntentClassification (pt)": 43.05, + "MassiveIntentClassification (ro)": 36.2, + "MassiveIntentClassification (ru)": 25.3, + "MassiveIntentClassification (sl)": 35.9, + "MassiveIntentClassification (sq)": 36.6, + "MassiveIntentClassification (sv)": 36.0, + "MassiveIntentClassification (sw)": 34.81, + "MassiveIntentClassification (ta)": 3.11, + "MassiveIntentClassification (te)": 2.53, + "MassiveIntentClassification (th)": 4.38, + "MassiveIntentClassification (tl)": 35.51, + "MassiveIntentClassification (tr)": 32.02, + "MassiveIntentClassification (ur)": 9.61, + "MassiveIntentClassification (vi)": 37.07, + "MassiveIntentClassification (zh-CN)": 2.81, + "MassiveIntentClassification (zh-TW)": 4.79, + "MassiveScenarioClassification (af)": 36.17, + "MassiveScenarioClassification (am)": 7.64, + "MassiveScenarioClassification (ar)": 15.26, + "MassiveScenarioClassification (az)": 30.73, + "MassiveScenarioClassification (bn)": 7.15, + "MassiveScenarioClassification (cy)": 34.73, + "MassiveScenarioClassification (da)": 39.93, + "MassiveScenarioClassification (de)": 38.62, + "MassiveScenarioClassification (el)": 27.18, + "MassiveScenarioClassification 
(en)": 58.58, + "MassiveScenarioClassification (es)": 39.44, + "MassiveScenarioClassification (fa)": 21.43, + "MassiveScenarioClassification (fi)": 33.21, + "MassiveScenarioClassification (fr)": 40.26, + "MassiveScenarioClassification (he)": 7.42, + "MassiveScenarioClassification (hi)": 8.06, + "MassiveScenarioClassification (hu)": 34.54, + "MassiveScenarioClassification (hy)": 8.61, + "MassiveScenarioClassification (id)": 40.04, + "MassiveScenarioClassification (is)": 33.57, + "MassiveScenarioClassification (it)": 40.1, + "MassiveScenarioClassification (ja)": 9.96, + "MassiveScenarioClassification (jv)": 36.11, + "MassiveScenarioClassification (ka)": 7.13, + "MassiveScenarioClassification (km)": 9.66, + "MassiveScenarioClassification (kn)": 7.55, + "MassiveScenarioClassification (ko)": 7.27, + "MassiveScenarioClassification (lv)": 37.03, + "MassiveScenarioClassification (ml)": 7.22, + "MassiveScenarioClassification (mn)": 21.53, + "MassiveScenarioClassification (ms)": 37.57, + "MassiveScenarioClassification (my)": 9.54, + "MassiveScenarioClassification (nb)": 35.71, + "MassiveScenarioClassification (nl)": 34.62, + "MassiveScenarioClassification (pl)": 36.87, + "MassiveScenarioClassification (pt)": 44.68, + "MassiveScenarioClassification (ro)": 37.29, + "MassiveScenarioClassification (ru)": 28.16, + "MassiveScenarioClassification (sl)": 37.95, + "MassiveScenarioClassification (sq)": 37.82, + "MassiveScenarioClassification (sv)": 35.35, + "MassiveScenarioClassification (sw)": 35.37, + "MassiveScenarioClassification (ta)": 7.19, + "MassiveScenarioClassification (te)": 7.29, + "MassiveScenarioClassification (th)": 9.47, + "MassiveScenarioClassification (tl)": 37.31, + "MassiveScenarioClassification (tr)": 34.57, + "MassiveScenarioClassification (ur)": 16.17, + "MassiveScenarioClassification (vi)": 35.91, + "MassiveScenarioClassification (zh-CN)": 9.19, + "MassiveScenarioClassification (zh-TW)": 10.19, + "ToxicConversationsClassification": 57.44, + "TweetSentimentExtractionClassification": 45.52 } ] }, "Clustering": { "v_measure": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "ArxivClusteringP2P": 44.75, + "ArxivClusteringS2S": 35.27, + "BiorxivClusteringP2P": 39.52, + "BiorxivClusteringS2S": 34.53, + "MedrxivClusteringP2P": 35.04, + "MedrxivClusteringS2S": 31.66, + "RedditClustering": 24.13, + "RedditClusteringP2P": 35.06, + "StackExchangeClustering": 39.01, + "StackExchangeClusteringP2P": 31.46, + "TwentyNewsgroupsClustering": 24.22 } ] }, "PairClassification": { "ap": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "SprintDuplicateQuestions": 71.63, + "TwitterSemEval2015": 43.25, + "TwitterURLCorpus": 69.22 } ] }, "Reranking": { "map": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "AskUbuntuDupQuestions": 50.07, + "MindSmallReranking": 24.8, + "SciDocsRR": 81.31, + "StackOverflowDupQuestions": 36.22 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "ArguAna": 32.67, + "CQADupstackRetrieval": 14.6, + "ClimateFEVER": 6.86, + "DBPedia": 4.14, + "FEVER": 5.45, + "FiQA2018": 5.64, + "HotpotQA": 5.46, + "MSMARCO": 5.59, + "NFCorpus": 0.85, + "NQ": 5.99, + "QuoraRetrieval": 64.65, + "SCIDOCS": 0.0, + "SciFact": 47.88, + "TRECCOVID": 29.91, + "Touche2020": 8.46 } ] }, "STS": { "spearman": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "BIOSSES": 64.95, + "SICK-R": 56.39, + "STS12": 62.49, + "STS13": 58.7, + "STS14": 54.87, + "STS15": 62.54, + 
"STS16": 64.27, + "STS17 (ar-ar)": 27.14, + "STS17 (en-ar)": 6.9, + "STS17 (en-de)": 11.59, + "STS17 (en-en)": 69.63, + "STS17 (en-tr)": 6.46, + "STS17 (es-en)": 10.86, + "STS17 (es-es)": 55.45, + "STS17 (fr-en)": 16.02, + "STS17 (it-en)": 19.87, + "STS17 (ko-ko)": 8.08, + "STS17 (nl-en)": 24.92, + "STS22 (ar)": 19.57, + "STS22 (de)": 17.31, + "STS22 (de-en)": 26.03, + "STS22 (de-fr)": 10.26, + "STS22 (de-pl)": 16.94, + "STS22 (en)": 55.06, + "STS22 (es)": 48.89, + "STS22 (es-en)": 51.79, + "STS22 (es-it)": 25.24, + "STS22 (fr)": 53.92, + "STS22 (fr-pl)": 39.44, + "STS22 (it)": 39.43, + "STS22 (pl)": 13.56, + "STS22 (pl-en)": 25.36, + "STS22 (ru)": 1.11, + "STS22 (tr)": 31.73, + "STS22 (zh)": 16.35, + "STS22 (zh-en)": 8.44, + "STSBenchmark": 61.26 } ] }, "Summarization": { "spearman": [ { - "Model": "monobert-large-msmarco" + "Model": "allenai-specter", + "SummEval": 27.66 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "monobert-large-msmarco", - "Core17InstructionRetrieval": -0.24, - "News21InstructionRetrieval": -0.8, - "Robust04InstructionRetrieval": -9.36 + "Model": "allenai-specter" } ] } }, - "bm25s": { + "distilrubert-small-cased-conversational": { "BitextMining": { "f1": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational", + "Tatoeba (rus-Cyrl_eng-Latn)": 24.16 } ] }, "Classification": { "accuracy": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational", + "GeoreviewClassification (rus-Cyrl)": 38.95, + "HeadlineClassification (rus-Cyrl)": 75.59, + "InappropriatenessClassification (rus-Cyrl)": 60.68, + "KinopoiskClassification (rus-Cyrl)": 49.67, + "MassiveIntentClassification (rus-Cyrl)": 63.12, + "MassiveScenarioClassification (rus-Cyrl)": 68.08, + "RuReviewsClassification (rus-Cyrl)": 54.05, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.53, + "RuSciBenchOECDClassification (rus-Cyrl)": 37.65 } ] }, "Clustering": { "v_measure": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational", + "GeoreviewClusteringP2P (rus-Cyrl)": 43.26, + "MLSUMClusteringP2P (rus-Cyrl)": 50.08, + "MLSUMClusteringS2S (rus-Cyrl)": 51.12, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 37.84, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 34.12 } ] }, "PairClassification": { "ap": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational", + "OpusparcusPC (rus-Cyrl)": 84.35, + "TERRa (rus-Cyrl)": 52.48 } ] }, "Reranking": { "map": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational", + "RuBQReranking (rus-Cyrl)": 42.58 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "bm25s", - "ArguAna": 49.28, - "CQADupstackRetrieval": 31.86, - "ClimateFEVER": 13.62, - "DBPedia": 29.91, - "FEVER": 48.09, - "FiQA2018": 25.14, - "HotpotQA": 56.91, - "MSMARCO": 21.89, - "NFCorpus": 32.08, - "NQ": 28.5, - "QuoraRetrieval": 80.42, - "SCIDOCS": 15.78, - "SciFact": 68.7, - "TRECCOVID": 62.31, - "Touche2020": 33.05 + "Model": "distilrubert-small-cased-conversational", + "RiaNewsRetrieval (rus-Cyrl)": 4.14, + "RuBQRetrieval (rus-Cyrl)": 10.6 } ] }, "STS": { "spearman": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational", + "RUParaPhraserSTS (rus-Cyrl)": 55.01, + "RuSTSBenchmarkSTS (rus-Cyrl)": 61.72, + "STS22 (rus-Cyrl)": 51.87, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 61.6 } ] }, "Summarization": { "spearman": [ { - "Model": "bm25s" + "Model": "distilrubert-small-cased-conversational" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "bm25s" + "Model": 
"distilrubert-small-cased-conversational" } ] } }, - "gottbert-base": { + "text-search-davinci-001": { "BitextMining": { "f1": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] }, "Classification": { "accuracy": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] }, "Clustering": { "v_measure": [ { - "Model": "gottbert-base", - "BlurbsClusteringP2P": 34.49, - "BlurbsClusteringS2S": 8.37, - "TenKGnadClusteringP2P": 33.66, - "TenKGnadClusteringS2S": 9.34 + "Model": "text-search-davinci-001" } ] }, "PairClassification": { "ap": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] }, "Reranking": { "map": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001", + "ArguAna": 43.5, + "ClimateFEVER": 22.3, + "FEVER": 77.5, + "FiQA2018": 51.2, + "HotpotQA": 68.8, + "NFCorpus": 40.7, + "QuoraRetrieval": 63.8, + "SciFact": 75.4, + "TRECCOVID": 64.9, + "Touche2020": 29.1 } ] }, "STS": { "spearman": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] }, "Summarization": { "spearman": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "gottbert-base" + "Model": "text-search-davinci-001" } ] } }, - "text2vec-base-chinese": { + "bge-small-zh-v1.5": { "BitextMining": { "f1": [ { - "Model": "text2vec-base-chinese" + "Model": "bge-small-zh-v1.5" } ] }, "Classification": { "accuracy": [ { - "Model": "text2vec-base-chinese", - "AmazonReviewsClassification (zh)": 34.12, - "IFlyTek": 42.05, - "JDReview": 82.14, - "MassiveIntentClassification (zh-CN)": 63.98, - "MassiveScenarioClassification (zh-CN)": 70.52, - "MultilingualSentiment": 60.98, - "OnlineShopping": 85.69, - "TNews": 43.01, - "Waimai": 77.22 + "Model": "bge-small-zh-v1.5", + "AmazonReviewsClassification (zh)": 35.91, + "IFlyTek": 45.49, + "JDReview": 80.04, + "MassiveIntentClassification (zh-CN)": 63.95, + "MassiveScenarioClassification (zh-CN)": 70.8, + "MultilingualSentiment": 63.06, + "OnlineShopping": 85.05, + "TNews": 48.15, + "Waimai": 83.18 } ] }, "Clustering": { "v_measure": [ { - "Model": "text2vec-base-chinese", - "CLSClusteringP2P": 35.27, - "CLSClusteringS2S": 32.42, - "ThuNewsClusteringP2P": 42.92, - "ThuNewsClusteringS2S": 40.01 + "Model": "bge-small-zh-v1.5", + "CLSClusteringP2P": 38.14, + "CLSClusteringS2S": 35.14, + "ThuNewsClusteringP2P": 54.22, + "ThuNewsClusteringS2S": 49.22 } ] }, "PairClassification": { "ap": [ { - "Model": "text2vec-base-chinese", - "Cmnli": 73.87, - "Ocnli": 60.95 + "Model": "bge-small-zh-v1.5", + "Cmnli": 76.24, + "Ocnli": 64.57 } ] }, "Reranking": { "map": [ { - "Model": "text2vec-base-chinese", - "CMedQAv1": 59.26, - "CMedQAv2": 59.82, - "MMarcoReranking": 12.76, - "T2Reranking": 65.95 + "Model": "bge-small-zh-v1.5", + "CMedQAv1": 77.4, + "CMedQAv2": 79.86, + "MMarcoReranking": 20.5, + "T2Reranking": 65.9 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "text2vec-base-chinese", - "CmedqaRetrieval": 15.91, - "CovidRetrieval": 44.81, - "DuRetrieval": 52.23, - "EcomRetrieval": 34.6, - "MMarcoRetrieval": 44.06, - "MedicalRetrieval": 27.56, - "T2Retrieval": 51.67, - "VideoRetrieval": 39.52 + "Model": "bge-small-zh-v1.5", + "CmedqaRetrieval": 35.11, + "CovidRetrieval": 70.14, + "DuRetrieval": 77.28, + "EcomRetrieval": 55.71, + "MMarcoRetrieval": 63.48, + "MedicalRetrieval": 49.8, + "T2Retrieval": 76.43, + "VideoRetrieval": 66.19 } ] }, "STS": { 
"spearman": [ { - "Model": "text2vec-base-chinese", - "AFQMC": 26.06, - "ATEC": 31.93, - "BQ": 42.67, - "LCQMC": 70.16, - "PAWSX": 17.21, - "QBQTC": 24.62, - "STS22 (zh)": 55.35, - "STSB": 79.3 + "Model": "bge-small-zh-v1.5", + "AFQMC": 33.42, + "ATEC": 43.01, + "BQ": 55.22, + "LCQMC": 72.19, + "PAWSX": 9.26, + "QBQTC": 35.29, + "STS22 (zh)": 67.72, + "STSB": 76.73 } ] }, "Summarization": { "spearman": [ { - "Model": "text2vec-base-chinese" + "Model": "bge-small-zh-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "text2vec-base-chinese" + "Model": "bge-small-zh-v1.5" } ] } }, - "voyage-lite-02-instruct": { + "bge-large-zh-v1.5": { "BitextMining": { "f1": [ { - "Model": "voyage-lite-02-instruct" + "Model": "bge-large-zh-v1.5" } ] }, "Classification": { "accuracy": [ { - "Model": "voyage-lite-02-instruct", - "AmazonCounterfactualClassification (en)": 88.31, - "AmazonPolarityClassification": 96.32, - "AmazonReviewsClassification (en)": 56.25, - "Banking77Classification": 88.59, - "EmotionClassification": 50.28, - "ImdbClassification": 95.75, - "MTOPDomainClassification (en)": 97.65, - "MTOPIntentClassification (en)": 75.16, - "MassiveIntentClassification (en)": 73.97, - "MassiveScenarioClassification (en)": 83.99, - "ToxicConversationsClassification": 81.75, - "TweetSentimentExtractionClassification": 62.98 + "Model": "bge-large-zh-v1.5", + "AmazonReviewsClassification (zh)": 41.38, + "IFlyTek": 48.74, + "JDReview": 85.14, + "MassiveIntentClassification (zh-CN)": 68.84, + "MassiveScenarioClassification (zh-CN)": 74.7, + "MultilingualSentiment": 72.97, + "OnlineShopping": 91.43, + "TNews": 52.1, + "Waimai": 86.9 } ] }, "Clustering": { "v_measure": [ { - "Model": "voyage-lite-02-instruct", - "ArxivClusteringP2P": 51.95, - "ArxivClusteringS2S": 42.48, - "BiorxivClusteringP2P": 50.15, - "BiorxivClusteringS2S": 42.84, - "MedrxivClusteringP2P": 47.24, - "MedrxivClusteringS2S": 43.48, - "RedditClustering": 63.73, - "RedditClusteringP2P": 64.09, - "StackExchangeClustering": 70.71, - "StackExchangeClusteringP2P": 40.34, - "TwentyNewsgroupsClustering": 59.56 + "Model": "bge-large-zh-v1.5", + "CLSClusteringP2P": 41.44, + "CLSClusteringS2S": 38.33, + "ThuNewsClusteringP2P": 59.61, + "ThuNewsClusteringS2S": 56.58 } ] }, "PairClassification": { "ap": [ { - "Model": "voyage-lite-02-instruct", - "SprintDuplicateQuestions": 98.07, - "TwitterSemEval2015": 74.44, - "TwitterURLCorpus": 88.11 + "Model": "bge-large-zh-v1.5", + "Cmnli": 85.27, + "Ocnli": 77.94 } ] }, "Reranking": { "map": [ { - "Model": "voyage-lite-02-instruct", - "AskUbuntuDupQuestions": 63.24, - "MindSmallReranking": 31.48, - "SciDocsRR": 84.68, - "StackOverflowDupQuestions": 53.56 + "Model": "bge-large-zh-v1.5", + "CMedQAv1": 83.45, + "CMedQAv2": 85.44, + "MMarcoReranking": 28.74, + "T2Reranking": 65.74 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "voyage-lite-02-instruct", - "ArguAna": 70.28, - "CQADupstackRetrieval": 46.2, - "ClimateFEVER": 31.95, - "DBPedia": 39.79, - "FEVER": 91.35, - "FiQA2018": 52.51, - "HotpotQA": 75.51, - "MSMARCO": 37.93, - "NFCorpus": 43.7, - "NQ": 64.26, - "QuoraRetrieval": 87.62, - "SCIDOCS": 20.24, - "SciFact": 79.91, - "TRECCOVID": 81.02, - "Touche2020": 26.8 + "Model": "bge-large-zh-v1.5", + "CmedqaRetrieval": 42.57, + "CovidRetrieval": 73.35, + "DuRetrieval": 86.32, + "EcomRetrieval": 65.33, + "MMarcoRetrieval": 79.23, + "MedicalRetrieval": 59.59, + "T2Retrieval": 83.99, + "VideoRetrieval": 73.32 } ] }, "STS": { "spearman": [ { - "Model": "voyage-lite-02-instruct", - "BIOSSES": 89.7, - 
"SICK-R": 78.44, - "STS12": 86.46, - "STS13": 87.76, - "STS14": 86.6, - "STS15": 90.1, - "STS16": 86.39, - "STS17 (en-en)": 86.98, - "STS22 (en)": 76.89, - "STSBenchmark": 88.56 + "Model": "bge-large-zh-v1.5", + "AFQMC": 44.36, + "ATEC": 49.54, + "BQ": 62.94, + "LCQMC": 74.33, + "PAWSX": 33.92, + "QBQTC": 37.29, + "STS22 (zh)": 68.94, + "STSB": 78.7 } ] }, "Summarization": { "spearman": [ { - "Model": "voyage-lite-02-instruct", - "SummEval": 31.01 + "Model": "bge-large-zh-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "voyage-lite-02-instruct" + "Model": "bge-large-zh-v1.5" } ] } }, - "voyage-lite-01-instruct": { + "sentence-camembert-large": { "BitextMining": { "f1": [ { - "Model": "voyage-lite-01-instruct" + "Model": "sentence-camembert-large" } ] }, "Classification": { "accuracy": [ { - "Model": "voyage-lite-01-instruct", - "AmazonCounterfactualClassification (en)": 71.43, - "AmazonPolarityClassification": 96.41, - "AmazonReviewsClassification (en)": 57.06, - "Banking77Classification": 81.64, - "EmotionClassification": 48.29, - "ImdbClassification": 95.49, - "MTOPDomainClassification (en)": 96.3, - "MTOPIntentClassification (en)": 67.93, - "MassiveIntentClassification (en)": 71.29, - "MassiveScenarioClassification (en)": 76.74, - "ToxicConversationsClassification": 75.45, - "TweetSentimentExtractionClassification": 59.44 + "Model": "sentence-camembert-large", + "AmazonReviewsClassification (fr)": 37.97, + "MTOPDomainClassification (fr)": 85.74, + "MTOPIntentClassification (fr)": 58.62, + "MasakhaNEWSClassification (fra)": 80.62, + "MassiveIntentClassification (fr)": 62.65, + "MassiveScenarioClassification (fr)": 69.29 } ] }, "Clustering": { "v_measure": [ { - "Model": "voyage-lite-01-instruct", - "ArxivClusteringP2P": 47.92, - "ArxivClusteringS2S": 42.42, - "BiorxivClusteringP2P": 38.72, - "BiorxivClusteringS2S": 36.6, - "MedrxivClusteringP2P": 34.04, - "MedrxivClusteringS2S": 32.81, - "RedditClustering": 61.56, - "RedditClusteringP2P": 65.35, - "StackExchangeClustering": 70.16, - "StackExchangeClusteringP2P": 38.23, - "TwentyNewsgroupsClustering": 53.56 + "Model": "sentence-camembert-large", + "AlloProfClusteringP2P": 62.69, + "AlloProfClusteringS2S": 42.06, + "HALClusteringS2S": 23.9, + "MLSUMClusteringP2P": 42.04, + "MLSUMClusteringS2S": 32.29, + "MasakhaNEWSClusteringP2P (fra)": 54.51, + "MasakhaNEWSClusteringS2S (fra)": 44.73 } ] }, "PairClassification": { - "ap": [ - { - "Model": "voyage-lite-01-instruct", - "SprintDuplicateQuestions": 96.01, - "TwitterSemEval2015": 76.87, - "TwitterURLCorpus": 86.84 + "ap": [ + { + "Model": "sentence-camembert-large", + "OpusparcusPC (fr)": 94.63, + "PawsXPairClassification (fr)": 59.59 } ] }, "Reranking": { "map": [ { - "Model": "voyage-lite-01-instruct", - "AskUbuntuDupQuestions": 65.77, - "MindSmallReranking": 31.69, - "SciDocsRR": 87.03, - "StackOverflowDupQuestions": 54.49 + "Model": "sentence-camembert-large", + "AlloprofReranking": 57.62, + "SyntecReranking": 88.15 } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "voyage-lite-01-instruct", - "ArguAna": 58.73, - "CQADupstackRetrieval": 45.11, - "ClimateFEVER": 37.47, - "DBPedia": 43.42, - "FEVER": 89.71, - "FiQA2018": 44.79, - "HotpotQA": 70.46, - "MSMARCO": 39.66, - "NFCorpus": 43.33, - "NQ": 60.65, - "QuoraRetrieval": 87.83, - "SCIDOCS": 23.19, - "SciFact": 73.64, - "TRECCOVID": 78.92, - "Touche2020": 36.83 + "Model": "sentence-camembert-large", + "AlloprofRetrieval": 31.62, + "BSARDRetrieval": 0.0, + "MintakaRetrieval (fr)": 21.87, + "SyntecRetrieval": 81.11, + 
"XPQARetrieval (fr)": 65.62 } ] }, "STS": { "spearman": [ { - "Model": "voyage-lite-01-instruct", - "BIOSSES": 84.85, - "SICK-R": 79.71, - "STS12": 77.09, - "STS13": 88.91, - "STS14": 82.08, - "STS15": 89.21, - "STS16": 84.74, - "STS17 (en-en)": 90.73, - "STS22 (en)": 62.1, - "STSBenchmark": 89.86 + "Model": "sentence-camembert-large", + "SICKFr": 77.7, + "STS22 (fr)": 81.73, + "STSBenchmarkMultilingualSTS (fr)": 85.79 } ] }, "Summarization": { "spearman": [ { - "Model": "voyage-lite-01-instruct", - "SummEval": 30.97 + "Model": "sentence-camembert-large", + "SummEvalFr": 30.88 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "voyage-lite-01-instruct" + "Model": "sentence-camembert-large" } ] } }, - "silver-retriever-base-v1": { + "multilingual-e5-base": { "BitextMining": { "f1": [ { - "Model": "silver-retriever-base-v1" + "Model": "multilingual-e5-base", + "BornholmBitextMining (dan-Latn)": 33.22, + "BornholmBitextMining": 46.4, + "Tatoeba (kzj-Latn_eng-Latn)": 6.26, + "Tatoeba (ina-Latn_eng-Latn)": 86.11, + "Tatoeba (bre-Latn_eng-Latn)": 5.44, + "Tatoeba (kab-Latn_eng-Latn)": 21.77, + "Tatoeba (ind-Latn_eng-Latn)": 90.26, + "Tatoeba (mkd-Cyrl_eng-Latn)": 73.76, + "Tatoeba (yue-Hant_eng-Latn)": 80.66, + "Tatoeba (amh-Ethi_eng-Latn)": 74.93, + "Tatoeba (ceb-Latn_eng-Latn)": 45.46, + "Tatoeba (lit-Latn_eng-Latn)": 75.53, + "Tatoeba (nds-Latn_eng-Latn)": 53.86, + "Tatoeba (kur-Latn_eng-Latn)": 52.96, + "Tatoeba (bel-Cyrl_eng-Latn)": 86.7, + "Tatoeba (ile-Latn_eng-Latn)": 72.56, + "Tatoeba (oci-Latn_eng-Latn)": 35.79, + "Tatoeba (heb-Hebr_eng-Latn)": 74.26, + "Tatoeba (mhr-Cyrl_eng-Latn)": 5.52, + "Tatoeba (afr-Latn_eng-Latn)": 87.04, + "Tatoeba (uig-Arab_eng-Latn)": 62.97, + "Tatoeba (mar-Deva_eng-Latn)": 86.62, + "Tatoeba (fry-Latn_eng-Latn)": 50.82, + "Tatoeba (tat-Cyrl_eng-Latn)": 66.92, + "Tatoeba (khm-Khmr_eng-Latn)": 47.27, + "Tatoeba (dtp-Latn_eng-Latn)": 5.13, + "Tatoeba (ben-Beng_eng-Latn)": 81.05, + "Tatoeba (ido-Latn_eng-Latn)": 74.41, + "Tatoeba (cha-Latn_eng-Latn)": 16.95, + "Tatoeba (zsm-Latn_eng-Latn)": 92.45, + "Tatoeba (pes-Arab_eng-Latn)": 87.18, + "Tatoeba (hye-Armn_eng-Latn)": 85.85, + "Tatoeba (cat-Latn_eng-Latn)": 84.09, + "Tatoeba (cym-Latn_eng-Latn)": 65.69, + "Tatoeba (aze-Latn_eng-Latn)": 84.71, + "Tatoeba (yid-Hebr_eng-Latn)": 63.2, + "Tatoeba (swg-Latn_eng-Latn)": 42.33, + "Tatoeba (war-Latn_eng-Latn)": 47.18, + "Tatoeba (swe-Latn_eng-Latn)": 91.33, + "Tatoeba (slk-Latn_eng-Latn)": 86.42, + "Tatoeba (gla-Latn_eng-Latn)": 43.08, + "Tatoeba (xho-Latn_eng-Latn)": 73.24, + "Tatoeba (dan-Latn_eng-Latn)": 91.23, + "Tatoeba (ara-Arab_eng-Latn)": 82.86, + "Tatoeba (ast-Latn_eng-Latn)": 74.36, + "Tatoeba (hrv-Latn_eng-Latn)": 92.5, + "Tatoeba (nob-Latn_eng-Latn)": 95.9, + "Tatoeba (eus-Latn_eng-Latn)": 56.26, + "Tatoeba (kaz-Cyrl_eng-Latn)": 75.56, + "Tatoeba (tuk-Latn_eng-Latn)": 19.67, + "Tatoeba (pam-Latn_eng-Latn)": 6.92, + "Tatoeba (gsw-Latn_eng-Latn)": 43.53, + "Tatoeba (slv-Latn_eng-Latn)": 81.93, + "Tatoeba (dsb-Latn_eng-Latn)": 34.36, + "Tatoeba (cor-Latn_eng-Latn)": 4.38, + "Tatoeba (ces-Latn_eng-Latn)": 88.75, + "Tatoeba (tam-Taml_eng-Latn)": 85.12, + "Tatoeba (glg-Latn_eng-Latn)": 82.69, + "Tatoeba (bul-Cyrl_eng-Latn)": 88.95, + "Tatoeba (deu-Latn_eng-Latn)": 97.07, + "Tatoeba (fin-Latn_eng-Latn)": 86.15, + "Tatoeba (csb-Latn_eng-Latn)": 24.29, + "Tatoeba (urd-Arab_eng-Latn)": 86.2, + "Tatoeba (est-Latn_eng-Latn)": 70.64, + "Tatoeba (wuu-Hans_eng-Latn)": 78.65, + "Tatoeba (tha-Thai_eng-Latn)": 94.22, + "Tatoeba (spa-Latn_eng-Latn)": 96.97, + "Tatoeba 
(ukr-Cyrl_eng-Latn)": 88.29, + "Tatoeba (awa-Deva_eng-Latn)": 68.39, + "Tatoeba (mal-Mlym_eng-Latn)": 96.72, + "Tatoeba (cbk-Latn_eng-Latn)": 60.66, + "Tatoeba (hsb-Latn_eng-Latn)": 40.36, + "Tatoeba (tzl-Latn_eng-Latn)": 34.44, + "Tatoeba (gle-Latn_eng-Latn)": 58.62, + "Tatoeba (orv-Cyrl_eng-Latn)": 16.0, + "Tatoeba (isl-Latn_eng-Latn)": 76.9, + "Tatoeba (jav-Latn_eng-Latn)": 61.25, + "Tatoeba (fao-Latn_eng-Latn)": 64.72, + "Tatoeba (pol-Latn_eng-Latn)": 94.57, + "Tatoeba (max-Deva_eng-Latn)": 52.4, + "Tatoeba (bos-Latn_eng-Latn)": 88.86, + "Tatoeba (hun-Latn_eng-Latn)": 84.41, + "Tatoeba (rus-Cyrl_eng-Latn)": 91.78, + "Tatoeba (arq-Arab_eng-Latn)": 26.61, + "Tatoeba (kor-Hang_eng-Latn)": 83.37, + "Tatoeba (uzb-Latn_eng-Latn)": 62.63, + "Tatoeba (pms-Latn_eng-Latn)": 44.61, + "Tatoeba (ell-Grek_eng-Latn)": 89.96, + "Tatoeba (swh-Latn_eng-Latn)": 66.81, + "Tatoeba (epo-Latn_eng-Latn)": 92.07, + "Tatoeba (jpn-Jpan_eng-Latn)": 90.3, + "Tatoeba (tel-Telu_eng-Latn)": 88.49, + "Tatoeba (srp-Cyrl_eng-Latn)": 89.08, + "Tatoeba (nov-Latn_eng-Latn)": 66.96, + "Tatoeba (cmn-Hans_eng-Latn)": 93.35, + "Tatoeba (tgl-Latn_eng-Latn)": 83.78, + "Tatoeba (ber-Tfng_eng-Latn)": 23.59, + "Tatoeba (sqi-Latn_eng-Latn)": 90.06, + "Tatoeba (ang-Latn_eng-Latn)": 29.87, + "Tatoeba (ita-Latn_eng-Latn)": 90.61, + "Tatoeba (por-Latn_eng-Latn)": 92.74, + "Tatoeba (mon-Cyrl_eng-Latn)": 78.37, + "Tatoeba (fra-Latn_eng-Latn)": 92.76, + "Tatoeba (lat-Latn_eng-Latn)": 39.62, + "Tatoeba (nno-Latn_eng-Latn)": 82.67, + "Tatoeba (arz-Arab_eng-Latn)": 66.79, + "Tatoeba (hin-Deva_eng-Latn)": 93.13, + "Tatoeba (nld-Latn_eng-Latn)": 93.2, + "Tatoeba (kat-Geor_eng-Latn)": 77.83, + "Tatoeba (lfn-Latn_eng-Latn)": 52.85, + "Tatoeba (lvs-Latn_eng-Latn)": 76.76, + "Tatoeba (tur-Latn_eng-Latn)": 92.54, + "Tatoeba (ron-Latn_eng-Latn)": 91.27, + "Tatoeba (vie-Latn_eng-Latn)": 94.55 } ] }, "Classification": { "accuracy": [ { - "Model": "silver-retriever-base-v1", - "AllegroReviews": 33.35, - "CBD": 68.51, - "MassiveIntentClassification (pl)": 66.63, - "MassiveScenarioClassification (pl)": 69.97, - "PAC": 66.26, - "PolEmo2.0-IN": 63.52, - "PolEmo2.0-OUT": 44.7 + "Model": "multilingual-e5-base", + "AllegroReviews (pol-Latn)": 40.78, + "AllegroReviews": 40.85, + "AmazonCounterfactualClassification (en-ext)": 76.91, + "AmazonCounterfactualClassification (en)": 77.36, + "AmazonCounterfactualClassification (deu-Latn)": 70.81, + "AmazonCounterfactualClassification (jpn-Jpan)": 72.02, + "AmazonPolarityClassification": 91.76, + "AmazonReviewsClassification (en)": 47.54, + "AmazonReviewsClassification (deu-Latn)": 44.37, + "AmazonReviewsClassification (spa-Latn)": 43.38, + "AmazonReviewsClassification (fra-Latn)": 41.55, + "AmazonReviewsClassification (jpn-Jpan)": 39.57, + "AmazonReviewsClassification (cmn-Hans)": 38.34, + "AmazonReviewsClassification (fr)": 40.94, + "AngryTweetsClassification (dan-Latn)": 56.28, + "AngryTweetsClassification": 54.65, + "Banking77Classification": 73.53, + "CBD (pol-Latn)": 62.6, + "CBD": 62.66, + "DKHateClassification": 63.53, + "DanishPoliticalCommentsClassification (dan-Latn)": 36.41, + "DanishPoliticalCommentsClassification": 36.69, + "EmotionClassification": 45.68, + "GeoreviewClassification (rus-Cyrl)": 46.05, + "HeadlineClassification (rus-Cyrl)": 75.64, + "IFlyTek (cmn-Hans)": 40.81, + "IFlyTek": 44.93, + "ImdbClassification": 84.29, + "InappropriatenessClassification (rus-Cyrl)": 58.78, + "JDReview (cmn-Hans)": 75.72, + "JDReview": 76.21, + "KinopoiskClassification (rus-Cyrl)": 50.89, + 
"LccSentimentClassification (dan-Latn)": 60.13, + "LccSentimentClassification": 59.67, + "MTOPDomainClassification (en)": 90.9, + "MTOPDomainClassification (deu-Latn)": 87.94, + "MTOPDomainClassification (spa-Latn)": 85.96, + "MTOPDomainClassification (fra-Latn)": 82.88, + "MTOPDomainClassification (hin-Deva)": 83.92, + "MTOPDomainClassification (tha-Thai)": 83.94, + "MTOPDomainClassification (fr)": 84.79, + "MTOPIntentClassification (en)": 61.6, + "MTOPIntentClassification (deu-Latn)": 61.05, + "MTOPIntentClassification (spa-Latn)": 55.36, + "MTOPIntentClassification (fra-Latn)": 52.23, + "MTOPIntentClassification (hin-Deva)": 53.93, + "MTOPIntentClassification (tha-Thai)": 58.69, + "MTOPIntentClassification (fr)": 55.51, + "MasakhaNEWSClassification (amh-Ethi)": 83.8, + "MasakhaNEWSClassification (eng)": 76.49, + "MasakhaNEWSClassification (fra-Latn)": 76.35, + "MasakhaNEWSClassification (hau-Latn)": 74.63, + "MasakhaNEWSClassification (ibo-Latn)": 64.59, + "MasakhaNEWSClassification (lin-Latn)": 70.57, + "MasakhaNEWSClassification (lug-Latn)": 68.12, + "MasakhaNEWSClassification (orm-Ethi)": 71.75, + "MasakhaNEWSClassification (pcm-Latn)": 91.05, + "MasakhaNEWSClassification (run-Latn)": 73.35, + "MasakhaNEWSClassification (sna-Latn)": 84.17, + "MasakhaNEWSClassification (som-Latn)": 60.1, + "MasakhaNEWSClassification (swa-Latn)": 70.74, + "MasakhaNEWSClassification (tir-Ethi)": 67.1, + "MasakhaNEWSClassification (xho-Latn)": 76.03, + "MasakhaNEWSClassification (yor-Latn)": 72.75, + "MasakhaNEWSClassification (fra)": 79.69, + "MassiveIntentClassification (tha-Thai)": 59.63, + "MassiveIntentClassification (tam-Taml)": 48.93, + "MassiveIntentClassification (fin-Latn)": 58.91, + "MassiveIntentClassification (rus-Cyrl)": 62.78, + "MassiveIntentClassification (afr-Latn)": 49.82, + "MassiveIntentClassification (heb-Hebr)": 55.3, + "MassiveIntentClassification (sqi-Latn)": 51.07, + "MassiveIntentClassification (por-Latn)": 62.12, + "MassiveIntentClassification (hye-Armn)": 48.77, + "MassiveIntentClassification (cym-Latn)": 37.05, + "MassiveIntentClassification (deu-Latn)": 59.82, + "MassiveIntentClassification (fas-Arab)": 59.51, + "MassiveIntentClassification (hun-Latn)": 57.69, + "MassiveIntentClassification (urd-Arab)": 51.3, + "MassiveIntentClassification (cmo-Hant)": 56.4, + "MassiveIntentClassification (khm-Khmr)": 32.14, + "MassiveIntentClassification (tel-Telu)": 50.09, + "MassiveIntentClassification (vie-Latn)": 59.61, + "MassiveIntentClassification (kan-Knda)": 48.63, + "MassiveIntentClassification (ara-Arab)": 50.2, + "MassiveIntentClassification (mya-Mymr)": 46.67, + "MassiveIntentClassification (slv-Latn)": 53.84, + "MassiveIntentClassification (jpn-Jpan)": 62.3, + "MassiveIntentClassification (mon-Cyrl)": 46.8, + "MassiveIntentClassification (jav-Latn)": 43.23, + "MassiveIntentClassification (lav-Latn)": 51.17, + "MassiveIntentClassification (ron-Latn)": 56.83, + "MassiveIntentClassification (dan-Latn)": 60.69, + "MassiveIntentClassification (nob-Latn)": 60.06, + "MassiveIntentClassification (tgl-Latn)": 48.99, + "MassiveIntentClassification (aze-Latn)": 51.36, + "MassiveIntentClassification (ind-Latn)": 58.7, + "MassiveIntentClassification (amh-Ethi)": 42.4, + "MassiveIntentClassification (ben-Beng)": 51.69, + "MassiveIntentClassification (ell-Grek)": 58.07, + "MassiveIntentClassification (hin-Deva)": 56.75, + "MassiveIntentClassification (nld-Latn)": 61.23, + "MassiveIntentClassification (pol-Latn)": 60.98, + "MassiveIntentClassification (swe-Latn)": 62.43, + 
"MassiveIntentClassification (isl-Latn)": 44.52, + "MassiveIntentClassification (mal-Mlym)": 53.75, + "MassiveIntentClassification (msa-Latn)": 52.84, + "MassiveIntentClassification (kat-Geor)": 37.56, + "MassiveIntentClassification (tur-Latn)": 60.69, + "MassiveIntentClassification (kor-Kore)": 59.97, + "MassiveIntentClassification (ita-Latn)": 61.29, + "MassiveIntentClassification (cmo-Hans)": 63.22, + "MassiveIntentClassification (en)": 65.71, + "MassiveIntentClassification (fra-Latn)": 61.32, + "MassiveIntentClassification (swa-Latn)": 45.24, + "MassiveIntentClassification (spa-Latn)": 61.13, + "MassiveIntentClassification (da)": 60.16, + "MassiveIntentClassification (nb)": 59.83, + "MassiveIntentClassification (sv)": 61.78, + "MassiveIntentClassification (pl)": 61.04, + "MassiveScenarioClassification (ind-Latn)": 63.6, + "MassiveScenarioClassification (tha-Thai)": 67.37, + "MassiveScenarioClassification (cmo-Hans)": 70.24, + "MassiveScenarioClassification (ben-Beng)": 57.0, + "MassiveScenarioClassification (kan-Knda)": 53.49, + "MassiveScenarioClassification (tel-Telu)": 54.24, + "MassiveScenarioClassification (aze-Latn)": 55.15, + "MassiveScenarioClassification (ell-Grek)": 65.38, + "MassiveScenarioClassification (swa-Latn)": 52.64, + "MassiveScenarioClassification (hin-Deva)": 62.91, + "MassiveScenarioClassification (tur-Latn)": 65.18, + "MassiveScenarioClassification (dan-Latn)": 67.97, + "MassiveScenarioClassification (msa-Latn)": 58.35, + "MassiveScenarioClassification (mya-Mymr)": 50.77, + "MassiveScenarioClassification (mon-Cyrl)": 51.87, + "MassiveScenarioClassification (tgl-Latn)": 54.36, + "MassiveScenarioClassification (cmo-Hant)": 63.73, + "MassiveScenarioClassification (ara-Arab)": 58.0, + "MassiveScenarioClassification (slv-Latn)": 58.3, + "MassiveScenarioClassification (spa-Latn)": 66.47, + "MassiveScenarioClassification (urd-Arab)": 56.74, + "MassiveScenarioClassification (fin-Latn)": 64.94, + "MassiveScenarioClassification (tam-Taml)": 53.86, + "MassiveScenarioClassification (ron-Latn)": 63.5, + "MassiveScenarioClassification (hye-Armn)": 53.63, + "MassiveScenarioClassification (vie-Latn)": 66.35, + "MassiveScenarioClassification (deu-Latn)": 68.4, + "MassiveScenarioClassification (afr-Latn)": 58.95, + "MassiveScenarioClassification (en)": 71.57, + "MassiveScenarioClassification (fra-Latn)": 67.37, + "MassiveScenarioClassification (jpn-Jpan)": 69.89, + "MassiveScenarioClassification (nld-Latn)": 68.62, + "MassiveScenarioClassification (cym-Latn)": 43.84, + "MassiveScenarioClassification (heb-Hebr)": 62.53, + "MassiveScenarioClassification (pol-Latn)": 66.12, + "MassiveScenarioClassification (fas-Arab)": 63.92, + "MassiveScenarioClassification (lav-Latn)": 56.42, + "MassiveScenarioClassification (por-Latn)": 65.49, + "MassiveScenarioClassification (rus-Cyrl)": 68.21, + "MassiveScenarioClassification (mal-Mlym)": 59.89, + "MassiveScenarioClassification (hun-Latn)": 65.75, + "MassiveScenarioClassification (nob-Latn)": 66.57, + "MassiveScenarioClassification (kor-Kore)": 67.9, + "MassiveScenarioClassification (isl-Latn)": 53.28, + "MassiveScenarioClassification (khm-Khmr)": 38.45, + "MassiveScenarioClassification (sqi-Latn)": 57.92, + "MassiveScenarioClassification (jav-Latn)": 51.94, + "MassiveScenarioClassification (amh-Ethi)": 50.33, + "MassiveScenarioClassification (ita-Latn)": 66.17, + "MassiveScenarioClassification (kat-Geor)": 43.38, + "MassiveScenarioClassification (swe-Latn)": 69.35, + "MassiveScenarioClassification (da)": 67.46, + "MassiveScenarioClassification 
(nb)": 66.18, + "MassiveScenarioClassification (sv)": 69.15, + "MassiveScenarioClassification (pl)": 66.11, + "MultilingualSentiment (cmn-Hans)": 67.56, + "MultilingualSentiment": 65.28, + "NoRecClassification (nob-Latn)": 53.74, + "NoRecClassification": 57.58, + "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 75.85, + "NordicLangClassification": 75.94, + "NorwegianParliament": 59.94, + "OnlineShopping (cmn-Hans)": 88.66, + "OnlineShopping": 88.4, + "PAC (pol-Latn)": 70.87, + "PAC": 70.87, + "PolEmo2.0-IN (pol-Latn)": 67.59, + "PolEmo2.0-IN": 67.66, + "PolEmo2.0-OUT (pol-Latn)": 43.93, + "PolEmo2.0-OUT": 43.91, + "RuReviewsClassification (rus-Cyrl)": 62.99, + "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.28, + "RuSciBenchOECDClassification (rus-Cyrl)": 42.69, + "ScalaDaClassification": 50.79, + "ScalaNbClassification": 50.32, + "TNews (cmn-Hans)": 47.52, + "TNews": 47.06, + "ToxicConversationsClassification": 64.33, + "TweetSentimentExtractionClassification": 62.8, + "Waimai (cmn-Hans)": 85.98, + "Waimai": 84.42 } ] }, "Clustering": { "v_measure": [ { - "Model": "silver-retriever-base-v1", - "8TagsClustering": 31.49 + "Model": "multilingual-e5-base", + "8TagsClustering": 24.97, + "AlloProfClusteringP2P": 62.09, + "AlloProfClusteringS2S": 32.98, + "ArxivClusteringP2P": 43.35, + "ArxivClusteringS2S": 36.0, + "BiorxivClusteringP2P": 37.55, + "BiorxivClusteringS2S": 30.33, + "CLSClusteringP2P": 32.41, + "CLSClusteringS2S": 36.99, + "GeoreviewClusteringP2P (rus-Cyrl)": 54.46, + "HALClusteringS2S": 22.48, + "MLSUMClusteringP2P (rus-Cyrl)": 43.47, + "MLSUMClusteringP2P": 43.48, + "MLSUMClusteringS2S (rus-Cyrl)": 40.87, + "MLSUMClusteringS2S": 38.53, + "MasakhaNEWSClusteringP2P (amh-Ethi)": 58.05, + "MasakhaNEWSClusteringP2P (eng)": 43.8, + "MasakhaNEWSClusteringP2P (fra-Latn)": 58.28, + "MasakhaNEWSClusteringP2P (hau-Latn)": 44.78, + "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.97, + "MasakhaNEWSClusteringP2P (lin-Latn)": 48.08, + "MasakhaNEWSClusteringP2P (lug-Latn)": 50.15, + "MasakhaNEWSClusteringP2P (orm-Ethi)": 38.02, + "MasakhaNEWSClusteringP2P (pcm-Latn)": 71.03, + "MasakhaNEWSClusteringP2P (run-Latn)": 58.28, + "MasakhaNEWSClusteringP2P (sna-Latn)": 59.25, + "MasakhaNEWSClusteringP2P (som-Latn)": 37.27, + "MasakhaNEWSClusteringP2P (swa-Latn)": 34.54, + "MasakhaNEWSClusteringP2P (tir-Ethi)": 53.44, + "MasakhaNEWSClusteringP2P (xho-Latn)": 40.32, + "MasakhaNEWSClusteringP2P (yor-Latn)": 37.97, + "MasakhaNEWSClusteringP2P (fra)": 47.91, + "MasakhaNEWSClusteringS2S (amh-Ethi)": 49.38, + "MasakhaNEWSClusteringS2S (eng)": 45.76, + "MasakhaNEWSClusteringS2S (fra-Latn)": 55.43, + "MasakhaNEWSClusteringS2S (hau-Latn)": 16.11, + "MasakhaNEWSClusteringS2S (ibo-Latn)": 24.38, + "MasakhaNEWSClusteringS2S (lin-Latn)": 44.8, + "MasakhaNEWSClusteringS2S (lug-Latn)": 45.67, + "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.41, + "MasakhaNEWSClusteringS2S (pcm-Latn)": 83.26, + "MasakhaNEWSClusteringS2S (run-Latn)": 48.77, + "MasakhaNEWSClusteringS2S (sna-Latn)": 43.9, + "MasakhaNEWSClusteringS2S (som-Latn)": 25.43, + "MasakhaNEWSClusteringS2S (swa-Latn)": 9.87, + "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.66, + "MasakhaNEWSClusteringS2S (xho-Latn)": 29.65, + "MasakhaNEWSClusteringS2S (yor-Latn)": 30.12, + "MasakhaNEWSClusteringS2S (fra)": 51.16, + "MedrxivClusteringP2P": 30.6, + "MedrxivClusteringS2S": 28.73, + "RedditClustering": 43.15, + "RedditClusteringP2P": 61.69, + "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.56, + "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.78, + 
"StackExchangeClustering": 55.31, + "StackExchangeClusteringP2P": 33.51, + "ThuNewsClusteringP2P": 40.98, + "ThuNewsClusteringS2S": 52.36, + "TwentyNewsgroupsClustering": 35.55 } ] }, "PairClassification": { "ap": [ { - "Model": "silver-retriever-base-v1", - "CDSC-E": 67.35, - "PPC": 85.33, - "PSC": 98.46, - "SICK-E-PL": 58.19 + "Model": "multilingual-e5-base", + "CDSC-E (pol-Latn)": 72.7, + "CDSC-E": 72.67, + "Cmnli": 74.51, + "Ocnli": 59.63, + "OpusparcusPC (deu-Latn)": 95.83, + "OpusparcusPC (en)": 98.71, + "OpusparcusPC (fin-Latn)": 90.3, + "OpusparcusPC (fra-Latn)": 92.12, + "OpusparcusPC (rus-Cyrl)": 86.82, + "OpusparcusPC (swe-Latn)": 93.05, + "OpusparcusPC (fr)": 92.72, + "PPC": 88.01, + "PSC (pol-Latn)": 99.14, + "PSC": 99.14, + "PawsXPairClassification (deu-Latn)": 54.11, + "PawsXPairClassification (en)": 55.79, + "PawsXPairClassification (spa-Latn)": 54.13, + "PawsXPairClassification (fra-Latn)": 56.01, + "PawsXPairClassification (jpn-Hira)": 49.02, + "PawsXPairClassification (kor-Hang)": 51.01, + "PawsXPairClassification (cmn-Hans)": 55.13, + "PawsXPairClassification (fr)": 56.93, + "SICK-E-PL (pol-Latn)": 68.76, + "SICK-E-PL": 68.77, + "SprintDuplicateQuestions": 93.02, + "TERRa (rus-Cyrl)": 54.96, + "TwitterSemEval2015": 72.21, + "TwitterURLCorpus": 85.48 } ] }, "Reranking": { "map": [ { - "Model": "silver-retriever-base-v1" - } - ] - }, - "Retrieval": { - "ndcg_at_10": [ - { - "Model": "silver-retriever-base-v1", - "ArguAna-PL": 44.12, - "DBPedia-PL": 26.32, - "FiQA-PL": 24.95, - "HotpotQA-PL": 45.13, - "MSMARCO-PL": 25.47, - "NFCorpus-PL": 28.55, - "NQ-PL": 37.9, - "Quora-PL": 77.98, - "SCIDOCS-PL": 10.9, - "SciFact-PL": 54.44, - "TRECCOVID-PL": 46.98 + "Model": "multilingual-e5-base", + "AlloprofReranking (fra-Latn)": 65.9, + "AlloprofReranking": 58.1, + "AskUbuntuDupQuestions": 59.28, + "CMedQAv1": 65.21, + "CMedQAv2": 66.06, + "MMarcoReranking (cmn-Hans)": 30.52, + "MMarcoReranking": 21.76, + "MindSmallReranking": 29.28, + "RuBQReranking (rus-Cyrl)": 72.01, + "SciDocsRR": 81.81, + "StackOverflowDupQuestions": 49.75, + "SyntecReranking (fra-Latn)": 85.31, + "SyntecReranking": 85.43, + "T2Reranking (cmn-Hans)": 64.86, + "T2Reranking": 64.39 + } + ] + }, + "Retrieval": { + "ndcg_at_10": [ + { + "Model": "multilingual-e5-base", + "AILACasedocs": 26.05, + "AILAStatutes": 20.37, + "ARCChallenge": 9.61, + "AlloprofRetrieval (fra-Latn)": 34.45, + "AlloprofRetrieval": 36.21, + "AlphaNLI": 16.44, + "ArguAna": 44.21, + "ArguAna-PL (pol-Latn)": 42.86, + "ArguAna-PL": 42.81, + "BSARDRetrieval (fra-Latn)": 18.83, + "BSARDRetrieval": 0.0, + "CmedqaRetrieval (cmn-Hans)": 27.2, + "CmedqaRetrieval": 27.2, + "CovidRetrieval (cmn-Hans)": 73.48, + "CovidRetrieval": 73.45, + "DBPedia-PL": 30.23, + "DuRetrieval (cmn-Hans)": 81.66, + "DuRetrieval": 81.64, + "EcomRetrieval (cmn-Hans)": 54.01, + "EcomRetrieval": 54.17, + "FiQA-PL (pol-Latn)": 25.59, + "FiQA-PL": 25.52, + "FiQA2018": 38.15, + "GerDaLIRSmall (deu-Latn)": 15.3, + "HellaSwag": 24.79, + "HotpotQA-PL": 63.52, + "LEMBNarrativeQARetrieval": 23.6, + "LEMBNeedleRetrieval": 32.0, + "LEMBPasskeyRetrieval": 38.25, + "LEMBQMSumRetrieval": 25.16, + "LEMBSummScreenFDRetrieval": 68.21, + "LEMBWikimQARetrieval": 56.04, + "LeCaRDv2 (zho-Hans)": 59.0, + "LegalBenchConsumerContractsQA": 69.02, + "LegalBenchCorporateLobbying": 88.97, + "LegalQuAD (deu-Latn)": 47.85, + "LegalSummarization": 61.69, + "MMarcoRetrieval (cmn-Hans)": 76.01, + "MMarcoRetrieval": 76.04, + "MSMARCO-PL": 29.52, + "MedicalRetrieval (cmn-Hans)": 48.33, + "MedicalRetrieval": 
48.35, + "MintakaRetrieval (ara-Arab)": 23.06, + "MintakaRetrieval (deu-Latn)": 29.8, + "MintakaRetrieval (spa-Latn)": 29.88, + "MintakaRetrieval (fra-Latn)": 30.96, + "MintakaRetrieval (hin-Deva)": 22.68, + "MintakaRetrieval (ita-Latn)": 29.77, + "MintakaRetrieval (jpn-Hira)": 22.98, + "MintakaRetrieval (por-Latn)": 30.62, + "MintakaRetrieval (fr)": 23.46, + "NFCorpus": 32.49, + "NFCorpus-PL (pol-Latn)": 25.99, + "NFCorpus-PL": 25.98, + "NQ-PL": 44.8, + "PIQA": 25.09, + "Quail": 3.52, + "Quora-PL": 81.22, + "RARbCode": 52.16, + "RARbMath": 65.35, + "RiaNewsRetrieval (rus-Cyrl)": 70.24, + "RuBQRetrieval (rus-Cyrl)": 69.58, + "SCIDOCS": 17.17, + "SCIDOCS-PL (pol-Latn)": 12.36, + "SCIDOCS-PL": 12.35, + "SIQA": 3.72, + "SciFact": 69.39, + "SciFact-PL (pol-Latn)": 62.26, + "SciFact-PL": 62.11, + "SpartQA": 7.91, + "SyntecRetrieval (fra-Latn)": 82.86, + "SyntecRetrieval": 80.49, + "T2Retrieval (cmn-Hans)": 70.77, + "T2Retrieval": 70.86, + "TRECCOVID": 69.5, + "TRECCOVID-PL (pol-Latn)": 65.94, + "TRECCOVID-PL": 66.06, + "TempReasonL1": 0.72, + "TempReasonL2Fact": 38.76, + "TempReasonL2Pure": 1.63, + "TempReasonL3Fact": 35.85, + "TempReasonL3Pure": 7.11, + "Touche2020": 21.5, + "VideoRetrieval (cmn-Hans)": 61.26, + "VideoRetrieval": 61.3, + "WinoGrande": 56.18, + "XPQARetrieval (ara-Arab_ara-Arab)": 39.97, + "XPQARetrieval (eng-Latn_ara-Arab)": 17.23, + "XPQARetrieval (ara-Arab_eng-Latn)": 34.35, + "XPQARetrieval (deu-Latn_deu-Latn)": 72.11, + "XPQARetrieval (eng-Latn_deu-Latn)": 28.91, + "XPQARetrieval (deu-Latn_eng-Latn)": 61.46, + "XPQARetrieval (spa-Latn_spa-Latn)": 58.35, + "XPQARetrieval (eng-Latn_spa-Latn)": 25.27, + "XPQARetrieval (spa-Latn_eng-Latn)": 51.07, + "XPQARetrieval (fra-Latn_fra-Latn)": 59.56, + "XPQARetrieval (eng-Latn_fra-Latn)": 23.69, + "XPQARetrieval (fra-Latn_eng-Latn)": 53.9, + "XPQARetrieval (hin-Deva_hin-Deva)": 70.56, + "XPQARetrieval (eng-Latn_hin-Deva)": 27.57, + "XPQARetrieval (hin-Deva_eng-Latn)": 63.68, + "XPQARetrieval (ita-Latn_ita-Latn)": 70.38, + "XPQARetrieval (eng-Latn_ita-Latn)": 26.06, + "XPQARetrieval (ita-Latn_eng-Latn)": 56.2, + "XPQARetrieval (jpn-Hira_jpn-Hira)": 71.97, + "XPQARetrieval (eng-Latn_jpn-Hira)": 17.63, + "XPQARetrieval (jpn-Hira_eng-Latn)": 61.03, + "XPQARetrieval (kor-Hang_kor-Hang)": 36.12, + "XPQARetrieval (eng-Latn_kor-Hang)": 20.27, + "XPQARetrieval (kor-Hang_eng-Latn)": 29.26, + "XPQARetrieval (pol-Latn_pol-Latn)": 48.1, + "XPQARetrieval (eng-Latn_pol-Latn)": 19.48, + "XPQARetrieval (pol-Latn_eng-Latn)": 40.18, + "XPQARetrieval (por-Latn_por-Latn)": 44.76, + "XPQARetrieval (eng-Latn_por-Latn)": 17.66, + "XPQARetrieval (por-Latn_eng-Latn)": 40.52, + "XPQARetrieval (tam-Taml_tam-Taml)": 35.25, + "XPQARetrieval (eng-Latn_tam-Taml)": 12.64, + "XPQARetrieval (tam-Taml_eng-Latn)": 26.73, + "XPQARetrieval (cmn-Hans_cmn-Hans)": 67.06, + "XPQARetrieval (eng-Latn_cmn-Hans)": 12.72, + "XPQARetrieval (cmn-Hans_eng-Latn)": 53.53, + "XPQARetrieval (fr)": 65.81 } ] }, "STS": { "spearman": [ { - "Model": "silver-retriever-base-v1", - "CDSC-R": 89.09, - "SICK-R-PL": 67.26, - "STS22 (pl)": 38.69 + "Model": "multilingual-e5-base", + "AFQMC (cmn-Hans)": 29.66, + "AFQMC": 29.67, + "ATEC (cmn-Hans)": 37.01, + "ATEC": 37.01, + "BIOSSES": 85.05, + "BQ (cmn-Hans)": 45.45, + "BQ": 45.45, + "CDSC-R (pol-Latn)": 90.09, + "CDSC-R": 90.08, + "LCQMC (cmn-Hans)": 74.15, + "LCQMC": 74.15, + "PAWSX (cmn-Hans)": 12.13, + "PAWSX": 12.14, + "QBQTC": 28.81, + "RUParaPhraserSTS (rus-Cyrl)": 70.17, + "RuSTSBenchmarkSTS (rus-Cyrl)": 79.64, + "SICK-R": 78.51, + "SICK-R-PL 
(pol-Latn)": 71.23, + "SICK-R-PL": 71.23, + "SICKFr (fra-Latn)": 75.76, + "SICKFr": 76.23, + "STS12": 76.7, + "STS13": 78.02, + "STS14": 76.6, + "STS15": 88.16, + "STS16": 84.28, + "STS17 (fra-Latn_eng-Latn)": 80.18, + "STS17 (ita-Latn_eng-Latn)": 80.16, + "STS17 (eng-Latn_ara-Arab)": 71.27, + "STS17 (kor-Hang)": 79.95, + "STS17 (eng-Latn_tur-Latn)": 63.3, + "STS17 (spa-Latn_eng-Latn)": 76.56, + "STS17 (spa-Latn)": 86.74, + "STS17 (en-en)": 87.84, + "STS17 (ara-Arab)": 74.48, + "STS17 (nld-Latn_eng-Latn)": 79.29, + "STS17 (eng-Latn_deu-Latn)": 82.08, + "STS22 (fra-Latn)": 75.04, + "STS22 (ara-Arab)": 57.82, + "STS22 (en)": 62.26, + "STS22 (spa-Latn)": 66.67, + "STS22 (fra-Latn_pol-Latn)": 73.25, + "STS22 (ita-Latn)": 77.76, + "STS22 (pol-Latn_eng-Latn)": 70.37, + "STS22 (tur-Latn)": 63.71, + "STS22 (rus-Cyrl)": 60.67, + "STS22 (deu-Latn)": 55.95, + "STS22 (deu-Latn_fra-Latn)": 59.68, + "STS22 (spa-Latn_eng-Latn)": 74.0, + "STS22 (cmn-Hans_eng-Latn)": 69.8, + "STS22 (pol-Latn)": 34.08, + "STS22 (spa-Latn_ita-Latn)": 66.43, + "STS22 (cmn-Hans)": 65.63, + "STS22 (deu-Latn_pol-Latn)": 39.35, + "STS22 (deu-Latn_eng-Latn)": 54.89, + "STS22 (zh)": 65.64, + "STS22 (pl)": 34.07, + "STSB (cmn-Hans)": 79.04, + "STSB": 79.05, + "STSBenchmark": 85.64, + "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.33, + "STSBenchmarkMultilingualSTS (pol-Latn)": 74.93, + "STSBenchmarkMultilingualSTS (spa-Latn)": 81.75, + "STSBenchmarkMultilingualSTS (en)": 85.64, + "STSBenchmarkMultilingualSTS (cmn-Hans)": 79.87, + "STSBenchmarkMultilingualSTS (fra-Latn)": 80.85, + "STSBenchmarkMultilingualSTS (deu-Latn)": 79.68, + "STSBenchmarkMultilingualSTS (nld-Latn)": 75.96, + "STSBenchmarkMultilingualSTS (por-Latn)": 67.16, + "STSBenchmarkMultilingualSTS (ita-Latn)": 78.09, + "STSBenchmarkMultilingualSTS (fr)": 80.62 } ] }, "Summarization": { "spearman": [ { - "Model": "silver-retriever-base-v1" + "Model": "multilingual-e5-base", + "SummEval": 30.23, + "SummEvalFr (fra-Latn)": 32.96, + "SummEvalFr": 30.76 } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "silver-retriever-base-v1" + "Model": "multilingual-e5-base" } ] } }, - "electra-small-nordic": { + "instructor-base": { "BitextMining": { "f1": [ { - "Model": "electra-small-nordic", - "BornholmBitextMining": 1.44 + "Model": "instructor-base" } ] }, "Classification": { "accuracy": [ { - "Model": "electra-small-nordic", - "AngryTweetsClassification": 47.91, - "DKHateClassification": 59.45, - "DanishPoliticalCommentsClassification": 31.89, - "LccSentimentClassification": 47.93, - "MassiveIntentClassification (da)": 26.3, - "MassiveIntentClassification (nb)": 24.6, - "MassiveIntentClassification (sv)": 27.58, - "MassiveScenarioClassification (da)": 28.93, - "MassiveScenarioClassification (nb)": 27.3, - "MassiveScenarioClassification (sv)": 29.93, - "NoRecClassification": 45.44, - "NordicLangClassification": 57.82, - "NorwegianParliament": 53.25, - "ScalaDaClassification": 70.41, - "ScalaNbClassification": 75.28 + "Model": "instructor-base" } ] }, "Clustering": { "v_measure": [ { - "Model": "electra-small-nordic" + "Model": "instructor-base" } ] }, "PairClassification": { "ap": [ { - "Model": "electra-small-nordic" + "Model": "instructor-base" } ] }, "Reranking": { "map": [ { - "Model": "electra-small-nordic" + "Model": "instructor-base" } ] }, "Retrieval": { "ndcg_at_10": [ { - "Model": "electra-small-nordic" + "Model": "instructor-base" } ] }, "STS": { "spearman": [ { - "Model": "electra-small-nordic" + "Model": "instructor-base" } ] }, "Summarization": { "spearman": [ { - 
"Model": "electra-small-nordic" + "Model": "instructor-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { - "Model": "electra-small-nordic" + "Model": "instructor-base", + "Core17InstructionRetrieval": -1.09, + "News21InstructionRetrieval": -1.78, + "Robust04InstructionRetrieval": -10.42 } ] } diff --git a/all_data_tasks/0/default.jsonl b/all_data_tasks/0/default.jsonl index 38d2b63e1d2420d5cdf12559fe9fef3166426fd6..b582576b4e24a1177c17872611eae742d44da400 100644 --- a/all_data_tasks/0/default.jsonl +++ b/all_data_tasks/0/default.jsonl @@ -1,209 +1,57 @@ -{"index":95,"Rank":1,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.05,"AmazonCounterfactualClassification (en)":92.72,"AmazonPolarityClassification":97.31,"AmazonReviewsClassification (en)":61.04,"Banking77Classification":90.02,"EmotionClassification":93.37,"ImdbClassification":96.8,"MassiveIntentClassification (en)":85.97,"MassiveScenarioClassification (en)":90.61,"MTOPDomainClassification (en)":98.58,"MTOPIntentClassification (en)":91.3,"ToxicConversationsClassification":91.14,"TweetSentimentExtractionClassification":79.7} -{"index":21,"Rank":2,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.95,"AmazonCounterfactualClassification (en)":93.15,"AmazonPolarityClassification":96.98,"AmazonReviewsClassification (en)":61.46,"Banking77Classification":91.49,"EmotionClassification":93.36,"ImdbClassification":96.91,"MassiveIntentClassification (en)":82.93,"MassiveScenarioClassification (en)":85.6,"MTOPDomainClassification (en)":98.42,"MTOPIntentClassification (en)":94.0,"ToxicConversationsClassification":93.17,"TweetSentimentExtractionClassification":79.93} -{"index":23,"Rank":3,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.08,"AmazonCounterfactualClassification (en)":89.48,"AmazonPolarityClassification":96.9,"AmazonReviewsClassification (en)":61.6,"Banking77Classification":92.53,"EmotionClassification":92.97,"ImdbClassification":96.66,"MassiveIntentClassification (en)":82.05,"MassiveScenarioClassification (en)":84.4,"MTOPDomainClassification (en)":98.61,"MTOPIntentClassification (en)":95.51,"ToxicConversationsClassification":87.34,"TweetSentimentExtractionClassification":78.86} -{"index":51,"Rank":4,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.91,"AmazonCounterfactualClassification (en)":93.1,"AmazonPolarityClassification":97.54,"AmazonReviewsClassification (en)":61.17,"Banking77Classification":88.73,"EmotionClassification":91.36,"ImdbClassification":96.92,"MassiveIntentClassification (en)":82.5,"MassiveScenarioClassification (en)":84.5,"MTOPDomainClassification (en)":99.03,"MTOPIntentClassification (en)":90.94,"ToxicConversationsClassification":91.17,"TweetSentimentExtractionClassification":77.93} -{"index":138,"Rank":5,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.63,"AmazonCounterfactualClassification (en)":92.87,"AmazonPolarityClassification":97.16,"AmazonReviewsClassification (en)":59.36,"Banking77Classification":89.79,"EmotionClassification":84.29,"ImdbClassification":96.66,"MassiveIntentClassification (en)":85.83,"MassiveScenarioClassification (en)":90.2,"MTOPDomainClassification (en)":99.01,"MTOPIntentClassification 
(en)":92.78,"ToxicConversationsClassification":88.76,"TweetSentimentExtractionClassification":74.84} -{"index":215,"Rank":6,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":87.35,"AmazonCounterfactualClassification (en)":95.12,"AmazonPolarityClassification":97.14,"AmazonReviewsClassification (en)":55.47,"Banking77Classification":90.34,"EmotionClassification":91.7,"ImdbClassification":97.06,"MassiveIntentClassification (en)":80.07,"MassiveScenarioClassification (en)":81.74,"MTOPDomainClassification (en)":96.51,"MTOPIntentClassification (en)":89.77,"ToxicConversationsClassification":92.6,"TweetSentimentExtractionClassification":80.64} -{"index":139,"Rank":7,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.67,"AmazonCounterfactualClassification (en)":92.36,"AmazonPolarityClassification":97.19,"AmazonReviewsClassification (en)":59.53,"Banking77Classification":89.3,"EmotionClassification":78.77,"ImdbClassification":96.49,"MassiveIntentClassification (en)":85.17,"MassiveScenarioClassification (en)":89.62,"MTOPDomainClassification (en)":98.83,"MTOPIntentClassification (en)":92.3,"ToxicConversationsClassification":86.94,"TweetSentimentExtractionClassification":73.58} -{"index":205,"Rank":8,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.58,"AmazonCounterfactualClassification (en)":91.31,"AmazonPolarityClassification":97.5,"AmazonReviewsClassification (en)":62.56,"Banking77Classification":87.57,"EmotionClassification":79.45,"ImdbClassification":96.75,"MassiveIntentClassification (en)":85.41,"MassiveScenarioClassification (en)":89.77,"MTOPDomainClassification (en)":99.04,"MTOPIntentClassification (en)":91.88,"ToxicConversationsClassification":85.12,"TweetSentimentExtractionClassification":72.58} -{"index":17,"Rank":9,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":86.58,"AmazonCounterfactualClassification (en)":91.31,"AmazonPolarityClassification":97.5,"AmazonReviewsClassification (en)":62.56,"Banking77Classification":87.57,"EmotionClassification":79.45,"ImdbClassification":96.75,"MassiveIntentClassification (en)":85.41,"MassiveScenarioClassification (en)":89.77,"MTOPDomainClassification (en)":99.04,"MTOPIntentClassification (en)":91.88,"ToxicConversationsClassification":85.12,"TweetSentimentExtractionClassification":72.58} -{"index":126,"Rank":10,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.58,"AmazonCounterfactualClassification (en)":91.31,"AmazonPolarityClassification":97.5,"AmazonReviewsClassification (en)":62.56,"Banking77Classification":87.57,"EmotionClassification":79.45,"ImdbClassification":96.75,"MassiveIntentClassification (en)":85.41,"MassiveScenarioClassification (en)":89.77,"MTOPDomainClassification (en)":99.04,"MTOPIntentClassification (en)":91.88,"ToxicConversationsClassification":85.12,"TweetSentimentExtractionClassification":72.58} -{"index":16,"Rank":11,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.47,"AmazonCounterfactualClassification (en)":83.99,"AmazonPolarityClassification":96.61,"AmazonReviewsClassification (en)":55.61,"Banking77Classification":87.31,"EmotionClassification":61.37,"ImdbClassification":95.83,"MassiveIntentClassification 
(en)":82.4,"MassiveScenarioClassification (en)":84.5,"MTOPDomainClassification (en)":97.69,"MTOPIntentClassification (en)":88.76,"ToxicConversationsClassification":82.66,"TweetSentimentExtractionClassification":72.95} -{"index":204,"Rank":12,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.47,"AmazonCounterfactualClassification (en)":83.99,"AmazonPolarityClassification":96.61,"AmazonReviewsClassification (en)":55.61,"Banking77Classification":87.31,"EmotionClassification":61.37,"ImdbClassification":95.83,"MassiveIntentClassification (en)":82.4,"MassiveScenarioClassification (en)":84.5,"MTOPDomainClassification (en)":97.69,"MTOPIntentClassification (en)":88.76,"ToxicConversationsClassification":82.66,"TweetSentimentExtractionClassification":72.95} -{"index":6,"Rank":13,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.49,"AmazonCounterfactualClassification (en)":77.6,"AmazonPolarityClassification":96.58,"AmazonReviewsClassification (en)":50.77,"Banking77Classification":86.96,"EmotionClassification":59.81,"ImdbClassification":96.13,"MassiveIntentClassification (en)":81.08,"MassiveScenarioClassification (en)":87.95,"MTOPDomainClassification (en)":98.86,"MTOPIntentClassification (en)":86.97,"ToxicConversationsClassification":83.58,"TweetSentimentExtractionClassification":71.55} -{"index":1,"Rank":14,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":81.17,"AmazonCounterfactualClassification (en)":75.34,"AmazonPolarityClassification":97.34,"AmazonReviewsClassification (en)":51.17,"Banking77Classification":88.62,"EmotionClassification":52.51,"ImdbClassification":95.65,"MassiveIntentClassification (en)":80.22,"MassiveScenarioClassification (en)":87.19,"MTOPDomainClassification (en)":98.35,"MTOPIntentClassification (en)":83.43,"ToxicConversationsClassification":89.67,"TweetSentimentExtractionClassification":74.52} -{"index":58,"Rank":15,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.2,"AmazonCounterfactualClassification (en)":84.43,"AmazonPolarityClassification":95.7,"AmazonReviewsClassification (en)":57.64,"Banking77Classification":87.88,"EmotionClassification":51.82,"ImdbClassification":94.78,"MassiveIntentClassification (en)":82.67,"MassiveScenarioClassification (en)":85.01,"MTOPDomainClassification (en)":96.83,"MTOPIntentClassification (en)":89.57,"ToxicConversationsClassification":71.29,"TweetSentimentExtractionClassification":64.76} -{"index":15,"Rank":16,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":79.6,"AmazonCounterfactualClassification (en)":83.16,"AmazonPolarityClassification":96.7,"AmazonReviewsClassification (en)":62.17,"Banking77Classification":81.68,"EmotionClassification":54.53,"ImdbClassification":95.58,"MassiveIntentClassification (en)":78.47,"MassiveScenarioClassification (en)":78.19,"MTOPDomainClassification (en)":95.75,"MTOPIntentClassification (en)":84.26,"ToxicConversationsClassification":78.75,"TweetSentimentExtractionClassification":66.0} -{"index":9,"Rank":17,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":79.25,"AmazonCounterfactualClassification (en)":88.31,"AmazonPolarityClassification":96.32,"AmazonReviewsClassification 
(en)":56.25,"Banking77Classification":88.59,"EmotionClassification":50.28,"ImdbClassification":95.75,"MassiveIntentClassification (en)":73.97,"MassiveScenarioClassification (en)":83.99,"MTOPDomainClassification (en)":97.65,"MTOPIntentClassification (en)":75.16,"ToxicConversationsClassification":81.75,"TweetSentimentExtractionClassification":62.98} -{"index":0,"Rank":18,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":79.0,"AmazonCounterfactualClassification (en)":70.93,"AmazonPolarityClassification":97.34,"AmazonReviewsClassification (en)":48.47,"Banking77Classification":86.01,"EmotionClassification":51.53,"ImdbClassification":95.7,"MassiveIntentClassification (en)":75.67,"MassiveScenarioClassification (en)":85.16,"MTOPDomainClassification (en)":98.02,"MTOPIntentClassification (en)":77.82,"ToxicConversationsClassification":88.33,"TweetSentimentExtractionClassification":72.97} -{"index":43,"Rank":19,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.53,"AmazonCounterfactualClassification (en)":80.48,"AmazonPolarityClassification":96.32,"AmazonReviewsClassification (en)":57.18,"Banking77Classification":87.46,"EmotionClassification":50.06,"ImdbClassification":94.32,"MassiveIntentClassification (en)":79.72,"MassiveScenarioClassification (en)":81.09,"MTOPDomainClassification (en)":95.29,"MTOPIntentClassification (en)":87.08,"ToxicConversationsClassification":70.89,"TweetSentimentExtractionClassification":62.48} -{"index":156,"Rank":20,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":78.47,"AmazonCounterfactualClassification (en)":78.69,"AmazonPolarityClassification":95.91,"AmazonReviewsClassification (en)":55.79,"Banking77Classification":88.23,"EmotionClassification":49.77,"ImdbClassification":94.78,"MassiveIntentClassification (en)":80.57,"MassiveScenarioClassification (en)":82.39,"MTOPDomainClassification (en)":96.12,"MTOPIntentClassification (en)":86.11,"ToxicConversationsClassification":69.59,"TweetSentimentExtractionClassification":63.72} -{"index":96,"Rank":21,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":78.33,"AmazonCounterfactualClassification (en)":77.93,"AmazonPolarityClassification":95.97,"AmazonReviewsClassification (en)":54.35,"Banking77Classification":88.81,"EmotionClassification":50.24,"ImdbClassification":94.79,"MassiveIntentClassification (en)":79.99,"MassiveScenarioClassification (en)":82.2,"MTOPDomainClassification (en)":96.36,"MTOPIntentClassification (en)":86.3,"ToxicConversationsClassification":69.33,"TweetSentimentExtractionClassification":63.64} -{"index":19,"Rank":22,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.75,"AmazonCounterfactualClassification (en)":73.01,"AmazonPolarityClassification":93.97,"AmazonReviewsClassification (en)":54.2,"Banking77Classification":87.33,"EmotionClassification":46.77,"ImdbClassification":92.1,"MassiveIntentClassification (en)":78.94,"MassiveScenarioClassification (en)":81.41,"MTOPDomainClassification (en)":96.6,"MTOPIntentClassification (en)":82.9,"ToxicConversationsClassification":82.61,"TweetSentimentExtractionClassification":63.16} -{"index":161,"Rank":23,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":77.56,"AmazonCounterfactualClassification (en)":76.24,"AmazonPolarityClassification":96.29,"AmazonReviewsClassification (en)":56.72,"Banking77Classification":85.73,"EmotionClassification":51.51,"ImdbClassification":94.6,"MassiveIntentClassification (en)":77.06,"MassiveScenarioClassification (en)":80.47,"MTOPDomainClassification (en)":93.93,"MTOPIntentClassification (en)":82.46,"ToxicConversationsClassification":71.06,"TweetSentimentExtractionClassification":64.62} -{"index":178,"Rank":24,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.43,"AmazonCounterfactualClassification (en)":82.97,"AmazonPolarityClassification":90.98,"AmazonReviewsClassification (en)":48.71,"Banking77Classification":88.15,"EmotionClassification":52.18,"ImdbClassification":87.42,"MassiveIntentClassification (en)":79.67,"MassiveScenarioClassification (en)":82.82,"MTOPDomainClassification (en)":96.16,"MTOPIntentClassification (en)":85.75,"ToxicConversationsClassification":71.91,"TweetSentimentExtractionClassification":62.4} -{"index":219,"Rank":25,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.17,"AmazonCounterfactualClassification (en)":79.66,"AmazonPolarityClassification":94.48,"AmazonReviewsClassification (en)":48.16,"Banking77Classification":88.91,"EmotionClassification":52.01,"ImdbClassification":89.47,"MassiveIntentClassification (en)":80.12,"MassiveScenarioClassification (en)":82.7,"MTOPDomainClassification (en)":96.46,"MTOPIntentClassification (en)":85.38,"ToxicConversationsClassification":66.18,"TweetSentimentExtractionClassification":62.54} -{"index":18,"Rank":26,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.17,"AmazonCounterfactualClassification (en)":74.79,"AmazonPolarityClassification":93.02,"AmazonReviewsClassification (en)":53.31,"Banking77Classification":86.73,"EmotionClassification":46.39,"ImdbClassification":87.48,"MassiveIntentClassification (en)":77.67,"MassiveScenarioClassification (en)":81.77,"MTOPDomainClassification (en)":96.5,"MTOPIntentClassification (en)":82.81,"ToxicConversationsClassification":83.98,"TweetSentimentExtractionClassification":61.57} -{"index":64,"Rank":27,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":76.63,"AmazonCounterfactualClassification (en)":77.58,"AmazonPolarityClassification":91.12,"AmazonReviewsClassification (en)":49.97,"Banking77Classification":88.31,"EmotionClassification":52.04,"ImdbClassification":87.42,"MassiveIntentClassification (en)":79.29,"MassiveScenarioClassification (en)":81.64,"MTOPDomainClassification (en)":96.04,"MTOPIntentClassification (en)":84.77,"ToxicConversationsClassification":69.26,"TweetSentimentExtractionClassification":62.14} -{"index":34,"Rank":28,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.49,"AmazonCounterfactualClassification (en)":81.3,"AmazonPolarityClassification":95.62,"AmazonReviewsClassification (en)":51.72,"Banking77Classification":85.53,"EmotionClassification":51.57,"ImdbClassification":93.57,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":78.74,"MTOPDomainClassification (en)":94.88,"MTOPIntentClassification (en)":76.52,"ToxicConversationsClassification":72.61,"TweetSentimentExtractionClassification":62.02} 
-{"index":60,"Rank":29,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":76.33,"AmazonCounterfactualClassification (en)":82.22,"AmazonPolarityClassification":89.69,"AmazonReviewsClassification (en)":48.47,"Banking77Classification":88.17,"EmotionClassification":51.71,"ImdbClassification":85.78,"MassiveIntentClassification (en)":78.06,"MassiveScenarioClassification (en)":81.35,"MTOPDomainClassification (en)":95.57,"MTOPIntentClassification (en)":82.81,"ToxicConversationsClassification":71.01,"TweetSentimentExtractionClassification":61.11} -{"index":118,"Rank":30,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.11,"AmazonCounterfactualClassification (en)":75.27,"AmazonPolarityClassification":93.23,"AmazonReviewsClassification (en)":49.72,"Banking77Classification":86.65,"EmotionClassification":55.89,"ImdbClassification":89.49,"MassiveIntentClassification (en)":75.53,"MassiveScenarioClassification (en)":79.23,"MTOPDomainClassification (en)":95.48,"MTOPIntentClassification (en)":79.09,"ToxicConversationsClassification":72.8,"TweetSentimentExtractionClassification":61.0} -{"index":115,"Rank":31,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.03,"AmazonCounterfactualClassification (en)":75.96,"AmazonPolarityClassification":93.51,"AmazonReviewsClassification (en)":50.45,"Banking77Classification":87.3,"EmotionClassification":54.68,"ImdbClassification":89.66,"MassiveIntentClassification (en)":76.01,"MassiveScenarioClassification (en)":79.64,"MTOPDomainClassification (en)":95.3,"MTOPIntentClassification (en)":78.1,"ToxicConversationsClassification":72.42,"TweetSentimentExtractionClassification":59.32} -{"index":117,"Rank":32,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.01,"AmazonCounterfactualClassification (en)":75.58,"AmazonPolarityClassification":93.41,"AmazonReviewsClassification (en)":49.06,"Banking77Classification":88.1,"EmotionClassification":54.72,"ImdbClassification":91.23,"MassiveIntentClassification (en)":76.2,"MassiveScenarioClassification (en)":79.35,"MTOPDomainClassification (en)":95.25,"MTOPIntentClassification (en)":78.24,"ToxicConversationsClassification":71.86,"TweetSentimentExtractionClassification":59.17} -{"index":36,"Rank":33,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.01,"AmazonCounterfactualClassification (en)":77.85,"AmazonPolarityClassification":95.6,"AmazonReviewsClassification (en)":49.79,"Banking77Classification":86.09,"EmotionClassification":48.15,"ImdbClassification":93.97,"MassiveIntentClassification (en)":74.51,"MassiveScenarioClassification (en)":79.0,"MTOPDomainClassification (en)":94.92,"MTOPIntentClassification (en)":78.89,"ToxicConversationsClassification":71.2,"TweetSentimentExtractionClassification":62.18} -{"index":186,"Rank":34,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.99,"AmazonCounterfactualClassification (en)":76.06,"AmazonPolarityClassification":91.98,"AmazonReviewsClassification (en)":47.94,"Banking77Classification":87.9,"EmotionClassification":52.03,"ImdbClassification":92.79,"MassiveIntentClassification (en)":77.41,"MassiveScenarioClassification (en)":80.45,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification 
(en)":79.26,"ToxicConversationsClassification":71.42,"TweetSentimentExtractionClassification":60.0} -{"index":22,"Rank":35,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":75.97,"AmazonCounterfactualClassification (en)":75.85,"AmazonPolarityClassification":92.42,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":87.79,"EmotionClassification":51.52,"ImdbClassification":92.85,"MassiveIntentClassification (en)":77.56,"MassiveScenarioClassification (en)":80.53,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.49,"ToxicConversationsClassification":70.91,"TweetSentimentExtractionClassification":59.94} -{"index":150,"Rank":36,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.85,"AmazonPolarityClassification":92.42,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":87.79,"EmotionClassification":51.52,"ImdbClassification":92.85,"MassiveIntentClassification (en)":77.56,"MassiveScenarioClassification (en)":80.53,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.49,"ToxicConversationsClassification":70.91,"TweetSentimentExtractionClassification":59.94} -{"index":114,"Rank":37,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.85,"AmazonPolarityClassification":92.42,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":87.79,"EmotionClassification":51.52,"ImdbClassification":92.85,"MassiveIntentClassification (en)":77.56,"MassiveScenarioClassification (en)":80.53,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.49,"ToxicConversationsClassification":70.91,"TweetSentimentExtractionClassification":59.94} -{"index":119,"Rank":38,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.76,"AmazonPolarityClassification":93.3,"AmazonReviewsClassification (en)":49.99,"Banking77Classification":86.4,"EmotionClassification":55.07,"ImdbClassification":90.15,"MassiveIntentClassification (en)":76.01,"MassiveScenarioClassification (en)":79.33,"MTOPDomainClassification (en)":95.29,"MTOPIntentClassification (en)":79.57,"ToxicConversationsClassification":69.44,"TweetSentimentExtractionClassification":61.27} -{"index":125,"Rank":39,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.76,"AmazonPolarityClassification":93.3,"AmazonReviewsClassification (en)":49.99,"Banking77Classification":86.4,"EmotionClassification":55.07,"ImdbClassification":90.15,"MassiveIntentClassification (en)":76.01,"MassiveScenarioClassification (en)":79.33,"MTOPDomainClassification (en)":95.29,"MTOPIntentClassification (en)":79.57,"ToxicConversationsClassification":69.44,"TweetSentimentExtractionClassification":61.27} -{"index":62,"Rank":40,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":75.92,"AmazonCounterfactualClassification (en)":79.94,"AmazonPolarityClassification":86.07,"AmazonReviewsClassification 
(en)":46.84,"Banking77Classification":88.05,"EmotionClassification":51.2,"ImdbClassification":82.94,"MassiveIntentClassification (en)":79.8,"MassiveScenarioClassification (en)":81.52,"MTOPDomainClassification (en)":96.14,"MTOPIntentClassification (en)":86.11,"ToxicConversationsClassification":70.59,"TweetSentimentExtractionClassification":61.9} -{"index":194,"Rank":41,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.64,"AmazonCounterfactualClassification (en)":75.04,"AmazonPolarityClassification":93.84,"AmazonReviewsClassification (en)":49.18,"Banking77Classification":87.82,"EmotionClassification":50.88,"ImdbClassification":92.83,"MassiveIntentClassification (en)":76.24,"MassiveScenarioClassification (en)":79.95,"MTOPDomainClassification (en)":93.95,"MTOPIntentClassification (en)":76.79,"ToxicConversationsClassification":71.48,"TweetSentimentExtractionClassification":59.71} -{"index":133,"Rank":42,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.64,"AmazonCounterfactualClassification (en)":75.04,"AmazonPolarityClassification":93.84,"AmazonReviewsClassification (en)":49.18,"Banking77Classification":87.82,"EmotionClassification":50.88,"ImdbClassification":92.83,"MassiveIntentClassification (en)":76.24,"MassiveScenarioClassification (en)":79.95,"MTOPDomainClassification (en)":93.95,"MTOPIntentClassification (en)":76.79,"ToxicConversationsClassification":71.48,"TweetSentimentExtractionClassification":59.71} -{"index":111,"Rank":43,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.58,"AmazonCounterfactualClassification (en)":75.55,"AmazonPolarityClassification":92.84,"AmazonReviewsClassification (en)":48.29,"Banking77Classification":87.69,"EmotionClassification":51.75,"ImdbClassification":92.78,"MassiveIntentClassification (en)":76.5,"MassiveScenarioClassification (en)":79.75,"MTOPDomainClassification (en)":94.02,"MTOPIntentClassification (en)":76.92,"ToxicConversationsClassification":71.09,"TweetSentimentExtractionClassification":59.75} -{"index":108,"Rank":44,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.58,"AmazonCounterfactualClassification (en)":75.55,"AmazonPolarityClassification":92.84,"AmazonReviewsClassification (en)":48.29,"Banking77Classification":87.69,"EmotionClassification":51.75,"ImdbClassification":92.78,"MassiveIntentClassification (en)":76.5,"MassiveScenarioClassification (en)":79.75,"MTOPDomainClassification (en)":94.02,"MTOPIntentClassification (en)":76.92,"ToxicConversationsClassification":71.09,"TweetSentimentExtractionClassification":59.75} -{"index":165,"Rank":45,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.58,"AmazonCounterfactualClassification (en)":75.55,"AmazonPolarityClassification":92.84,"AmazonReviewsClassification (en)":48.29,"Banking77Classification":87.69,"EmotionClassification":51.75,"ImdbClassification":92.78,"MassiveIntentClassification (en)":76.5,"MassiveScenarioClassification (en)":79.75,"MTOPDomainClassification (en)":94.02,"MTOPIntentClassification (en)":76.92,"ToxicConversationsClassification":71.09,"TweetSentimentExtractionClassification":59.75} -{"index":20,"Rank":46,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, 
fp32)":1.63,"Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"index":181,"Rank":47,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"index":180,"Rank":48,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"index":182,"Rank":49,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"index":120,"Rank":50,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"index":179,"Rank":51,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification 
(en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"index":283,"Rank":52,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.45,"AmazonCounterfactualClassification (en)":78.93,"AmazonPolarityClassification":92.85,"AmazonReviewsClassification (en)":48.7,"Banking77Classification":85.69,"EmotionClassification":51.58,"ImdbClassification":87.67,"MassiveIntentClassification (en)":74.64,"MassiveScenarioClassification (en)":79.79,"MTOPDomainClassification (en)":95.36,"MTOPIntentClassification (en)":75.07,"ToxicConversationsClassification":72.92,"TweetSentimentExtractionClassification":62.22} -{"index":137,"Rank":53,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.28,"AmazonCounterfactualClassification (en)":77.19,"AmazonPolarityClassification":93.26,"AmazonReviewsClassification (en)":49.61,"Banking77Classification":84.73,"EmotionClassification":54.47,"ImdbClassification":91.31,"MassiveIntentClassification (en)":73.88,"MassiveScenarioClassification (en)":78.28,"MTOPDomainClassification (en)":93.5,"MTOPIntentClassification (en)":71.06,"ToxicConversationsClassification":72.99,"TweetSentimentExtractionClassification":63.07} -{"index":151,"Rank":54,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.28,"AmazonCounterfactualClassification (en)":77.19,"AmazonPolarityClassification":93.26,"AmazonReviewsClassification (en)":49.61,"Banking77Classification":84.73,"EmotionClassification":54.47,"ImdbClassification":91.31,"MassiveIntentClassification (en)":73.88,"MassiveScenarioClassification (en)":78.28,"MTOPDomainClassification (en)":93.5,"MTOPIntentClassification (en)":71.06,"ToxicConversationsClassification":72.99,"TweetSentimentExtractionClassification":63.07} -{"index":93,"Rank":55,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.24,"AmazonCounterfactualClassification (en)":79.22,"AmazonPolarityClassification":93.75,"AmazonReviewsClassification (en)":48.61,"Banking77Classification":84.55,"EmotionClassification":49.45,"ImdbClassification":91.69,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":78.11,"MTOPDomainClassification (en)":94.62,"MTOPIntentClassification (en)":77.14,"ToxicConversationsClassification":70.9,"TweetSentimentExtractionClassification":60.94} -{"index":155,"Rank":56,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":75.24,"AmazonCounterfactualClassification (en)":79.22,"AmazonPolarityClassification":93.75,"AmazonReviewsClassification (en)":48.61,"Banking77Classification":84.55,"EmotionClassification":49.45,"ImdbClassification":91.69,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":78.11,"MTOPDomainClassification (en)":94.62,"MTOPIntentClassification (en)":77.14,"ToxicConversationsClassification":70.9,"TweetSentimentExtractionClassification":60.94} -{"index":261,"Rank":57,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.16,"AmazonCounterfactualClassification (en)":75.18,"AmazonPolarityClassification":93.07,"AmazonReviewsClassification (en)":48.42,"Banking77Classification":88.03,"EmotionClassification":51.93,"ImdbClassification":91.91,"MassiveIntentClassification (en)":75.91,"MassiveScenarioClassification 
(en)":79.43,"MTOPDomainClassification (en)":94.33,"MTOPIntentClassification (en)":76.61,"ToxicConversationsClassification":67.91,"TweetSentimentExtractionClassification":59.22} -{"index":197,"Rank":58,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.16,"AmazonCounterfactualClassification (en)":75.18,"AmazonPolarityClassification":93.07,"AmazonReviewsClassification (en)":48.42,"Banking77Classification":88.03,"EmotionClassification":51.93,"ImdbClassification":91.91,"MassiveIntentClassification (en)":75.91,"MassiveScenarioClassification (en)":79.43,"MTOPDomainClassification (en)":94.33,"MTOPIntentClassification (en)":76.61,"ToxicConversationsClassification":67.91,"TweetSentimentExtractionClassification":59.22} -{"index":198,"Rank":59,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.98,"AmazonCounterfactualClassification (en)":76.16,"AmazonPolarityClassification":92.95,"AmazonReviewsClassification (en)":48.21,"Banking77Classification":86.35,"EmotionClassification":51.27,"ImdbClassification":89.72,"MassiveIntentClassification (en)":75.16,"MassiveScenarioClassification (en)":78.93,"MTOPDomainClassification (en)":93.95,"MTOPIntentClassification (en)":75.26,"ToxicConversationsClassification":72.12,"TweetSentimentExtractionClassification":59.67} -{"index":160,"Rank":60,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.81,"AmazonCounterfactualClassification (en)":79.06,"AmazonPolarityClassification":93.49,"AmazonReviewsClassification (en)":47.56,"Banking77Classification":84.73,"EmotionClassification":46.5,"ImdbClassification":90.23,"MassiveIntentClassification (en)":73.76,"MassiveScenarioClassification (en)":77.51,"MTOPDomainClassification (en)":93.67,"MTOPIntentClassification (en)":77.9,"ToxicConversationsClassification":71.32,"TweetSentimentExtractionClassification":61.98} -{"index":8,"Rank":61,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.79,"AmazonCounterfactualClassification (en)":71.43,"AmazonPolarityClassification":96.41,"AmazonReviewsClassification (en)":57.06,"Banking77Classification":81.64,"EmotionClassification":48.29,"ImdbClassification":95.49,"MassiveIntentClassification (en)":71.29,"MassiveScenarioClassification (en)":76.74,"MTOPDomainClassification (en)":96.3,"MTOPIntentClassification (en)":67.93,"ToxicConversationsClassification":75.45,"TweetSentimentExtractionClassification":59.44} -{"index":53,"Rank":62,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.5,"AmazonCounterfactualClassification (en)":72.39,"AmazonPolarityClassification":93.71,"AmazonReviewsClassification (en)":50.85,"Banking77Classification":85.41,"EmotionClassification":55.93,"ImdbClassification":93.57,"MassiveIntentClassification (en)":73.88,"MassiveScenarioClassification (en)":77.42,"MTOPDomainClassification (en)":94.25,"MTOPIntentClassification (en)":67.51,"ToxicConversationsClassification":67.3,"TweetSentimentExtractionClassification":61.76} -{"index":33,"Rank":63,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.31,"AmazonCounterfactualClassification (en)":78.63,"AmazonPolarityClassification":94.8,"AmazonReviewsClassification 
(en)":51.02,"Banking77Classification":79.7,"EmotionClassification":52.74,"ImdbClassification":92.17,"MassiveIntentClassification (en)":70.01,"MassiveScenarioClassification (en)":76.34,"MTOPDomainClassification (en)":93.63,"MTOPIntentClassification (en)":64.93,"ToxicConversationsClassification":73.04,"TweetSentimentExtractionClassification":64.72} -{"index":24,"Rank":64,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":74.14,"AmazonCounterfactualClassification (en)":73.79,"AmazonPolarityClassification":92.75,"AmazonReviewsClassification (en)":46.99,"Banking77Classification":85.74,"EmotionClassification":47.84,"ImdbClassification":90.61,"MassiveIntentClassification (en)":74.81,"MassiveScenarioClassification (en)":78.7,"MTOPDomainClassification (en)":93.36,"MTOPIntentClassification (en)":74.75,"ToxicConversationsClassification":69.89,"TweetSentimentExtractionClassification":60.51} -{"index":193,"Rank":65,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.14,"AmazonCounterfactualClassification (en)":74.76,"AmazonPolarityClassification":93.26,"AmazonReviewsClassification (en)":46.16,"Banking77Classification":86.65,"EmotionClassification":49.32,"ImdbClassification":90.4,"MassiveIntentClassification (en)":73.87,"MassiveScenarioClassification (en)":78.17,"MTOPDomainClassification (en)":93.1,"MTOPIntentClassification (en)":73.24,"ToxicConversationsClassification":71.53,"TweetSentimentExtractionClassification":59.25} -{"index":26,"Rank":66,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"index":28,"Rank":67,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"index":206,"Rank":68,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"index":129,"Rank":69,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":74.12,"AmazonCounterfactualClassification 
(en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"index":29,"Rank":70,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"index":27,"Rank":71,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"index":65,"Rank":72,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":74.07,"AmazonCounterfactualClassification (en)":76.94,"AmazonPolarityClassification":85.29,"AmazonReviewsClassification (en)":47.09,"Banking77Classification":86.16,"EmotionClassification":48.88,"ImdbClassification":77.95,"MassiveIntentClassification (en)":76.65,"MassiveScenarioClassification (en)":79.99,"MTOPDomainClassification (en)":95.48,"MTOPIntentClassification (en)":82.84,"ToxicConversationsClassification":70.71,"TweetSentimentExtractionClassification":60.9} -{"index":170,"Rank":73,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.96,"AmazonCounterfactualClassification (en)":70.85,"AmazonPolarityClassification":91.81,"AmazonReviewsClassification (en)":48.94,"Banking77Classification":84.61,"EmotionClassification":54.9,"ImdbClassification":93.14,"MassiveIntentClassification (en)":73.49,"MassiveScenarioClassification (en)":77.38,"MTOPDomainClassification (en)":93.64,"MTOPIntentClassification (en)":66.01,"ToxicConversationsClassification":71.19,"TweetSentimentExtractionClassification":61.55} -{"index":148,"Rank":74,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":73.86,"AmazonCounterfactualClassification (en)":88.13,"AmazonPolarityClassification":91.53,"AmazonReviewsClassification (en)":47.86,"Banking77Classification":78.51,"EmotionClassification":52.73,"ImdbClassification":88.32,"MassiveIntentClassification (en)":68.9,"MassiveScenarioClassification (en)":73.35,"MTOPDomainClassification (en)":93.89,"MTOPIntentClassification (en)":67.98,"ToxicConversationsClassification":71.05,"TweetSentimentExtractionClassification":64.13} -{"index":140,"Rank":75,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":73.84,"AmazonCounterfactualClassification (en)":77.78,"AmazonPolarityClassification":92.81,"AmazonReviewsClassification (en)":46.71,"Banking77Classification":83.53,"EmotionClassification":46.95,"ImdbClassification":86.15,"MassiveIntentClassification (en)":73.04,"MassiveScenarioClassification (en)":77.65,"MTOPDomainClassification (en)":93.69,"MTOPIntentClassification (en)":75.31,"ToxicConversationsClassification":72.11,"TweetSentimentExtractionClassification":60.39} -{"index":153,"Rank":76,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":73.84,"AmazonCounterfactualClassification (en)":77.78,"AmazonPolarityClassification":92.81,"AmazonReviewsClassification (en)":46.71,"Banking77Classification":83.53,"EmotionClassification":46.95,"ImdbClassification":86.15,"MassiveIntentClassification (en)":73.04,"MassiveScenarioClassification (en)":77.65,"MTOPDomainClassification (en)":93.69,"MTOPIntentClassification (en)":75.31,"ToxicConversationsClassification":72.11,"TweetSentimentExtractionClassification":60.39} -{"index":207,"Rank":77,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.65,"AmazonCounterfactualClassification (en)":78.67,"AmazonPolarityClassification":90.41,"AmazonReviewsClassification (en)":47.81,"Banking77Classification":83.82,"EmotionClassification":48.81,"ImdbClassification":83.78,"MassiveIntentClassification (en)":72.76,"MassiveScenarioClassification (en)":76.7,"MTOPDomainClassification (en)":93.47,"MTOPIntentClassification (en)":75.27,"ToxicConversationsClassification":71.39,"TweetSentimentExtractionClassification":60.87} -{"index":213,"Rank":78,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.55,"AmazonCounterfactualClassification (en)":75.21,"AmazonPolarityClassification":91.81,"AmazonReviewsClassification (en)":47.16,"Banking77Classification":84.25,"EmotionClassification":47.99,"ImdbClassification":85.31,"MassiveIntentClassification (en)":73.46,"MassiveScenarioClassification (en)":77.08,"MTOPDomainClassification (en)":93.01,"MTOPIntentClassification (en)":75.03,"ToxicConversationsClassification":71.42,"TweetSentimentExtractionClassification":60.92} -{"index":202,"Rank":79,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.52,"AmazonCounterfactualClassification (en)":74.19,"AmazonPolarityClassification":91.89,"AmazonReviewsClassification (en)":46.72,"Banking77Classification":85.1,"EmotionClassification":46.84,"ImdbClassification":89.35,"MassiveIntentClassification (en)":74.01,"MassiveScenarioClassification (en)":77.96,"MTOPDomainClassification (en)":92.67,"MTOPIntentClassification (en)":74.03,"ToxicConversationsClassification":69.5,"TweetSentimentExtractionClassification":60.02} -{"index":175,"Rank":80,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":73.45,"AmazonCounterfactualClassification (en)":74.73,"AmazonPolarityClassification":88.54,"AmazonReviewsClassification (en)":45.26,"Banking77Classification":84.01,"EmotionClassification":48.77,"ImdbClassification":79.44,"MassiveIntentClassification (en)":71.93,"MassiveScenarioClassification (en)":74.49,"MTOPDomainClassification (en)":95.68,"MTOPIntentClassification (en)":83.15,"ToxicConversationsClassification":73.35,"TweetSentimentExtractionClassification":62.06} 
-{"index":135,"Rank":81,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.45,"AmazonCounterfactualClassification (en)":74.73,"AmazonPolarityClassification":88.54,"AmazonReviewsClassification (en)":45.26,"Banking77Classification":84.01,"EmotionClassification":48.77,"ImdbClassification":79.44,"MassiveIntentClassification (en)":71.93,"MassiveScenarioClassification (en)":74.49,"MTOPDomainClassification (en)":95.68,"MTOPIntentClassification (en)":83.15,"ToxicConversationsClassification":73.35,"TweetSentimentExtractionClassification":62.06} -{"index":246,"Rank":82,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":73.42,"AmazonCounterfactualClassification (en)":77.07,"AmazonPolarityClassification":92.79,"AmazonReviewsClassification (en)":48.93,"Banking77Classification":82.31,"EmotionClassification":48.57,"ImdbClassification":90.23,"MassiveIntentClassification (en)":73.44,"MassiveScenarioClassification (en)":74.82,"MTOPDomainClassification (en)":92.49,"MTOPIntentClassification (en)":68.33,"ToxicConversationsClassification":70.04,"TweetSentimentExtractionClassification":62.01} -{"index":253,"Rank":83,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.33,"AmazonCounterfactualClassification (en)":72.63,"AmazonPolarityClassification":92.52,"AmazonReviewsClassification (en)":49.07,"Banking77Classification":86.06,"EmotionClassification":47.88,"ImdbClassification":88.46,"MassiveIntentClassification (en)":72.62,"MassiveScenarioClassification (en)":76.77,"MTOPDomainClassification (en)":93.51,"MTOPIntentClassification (en)":73.25,"ToxicConversationsClassification":70.56,"TweetSentimentExtractionClassification":56.58} -{"index":211,"Rank":84,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":73.24,"AmazonCounterfactualClassification (en)":74.27,"AmazonPolarityClassification":91.89,"AmazonReviewsClassification (en)":46.97,"Banking77Classification":84.15,"EmotionClassification":47.73,"ImdbClassification":85.47,"MassiveIntentClassification (en)":73.07,"MassiveScenarioClassification (en)":76.82,"MTOPDomainClassification (en)":92.62,"MTOPIntentClassification (en)":74.27,"ToxicConversationsClassification":71.25,"TweetSentimentExtractionClassification":60.4} -{"index":284,"Rank":85,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.21,"AmazonCounterfactualClassification (en)":76.42,"AmazonPolarityClassification":90.84,"AmazonReviewsClassification (en)":45.73,"Banking77Classification":83.01,"EmotionClassification":50.63,"ImdbClassification":83.66,"MassiveIntentClassification (en)":72.86,"MassiveScenarioClassification (en)":76.84,"MTOPDomainClassification (en)":93.91,"MTOPIntentClassification (en)":70.98,"ToxicConversationsClassification":71.91,"TweetSentimentExtractionClassification":61.72} -{"index":154,"Rank":86,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":73.14,"AmazonCounterfactualClassification (en)":77.69,"AmazonPolarityClassification":90.05,"AmazonReviewsClassification (en)":43.02,"Banking77Classification":84.14,"EmotionClassification":48.05,"ImdbClassification":82.11,"MassiveIntentClassification (en)":73.22,"MassiveScenarioClassification (en)":77.39,"MTOPDomainClassification (en)":93.86,"MTOPIntentClassification 
(en)":76.4,"ToxicConversationsClassification":70.56,"TweetSentimentExtractionClassification":61.21} -{"index":124,"Rank":87,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":73.13,"AmazonCounterfactualClassification (en)":75.63,"AmazonPolarityClassification":91.01,"AmazonReviewsClassification (en)":46.99,"Banking77Classification":81.93,"EmotionClassification":50.16,"ImdbClassification":87.84,"MassiveIntentClassification (en)":71.08,"MassiveScenarioClassification (en)":76.64,"MTOPDomainClassification (en)":93.36,"MTOPIntentClassification (en)":66.58,"ToxicConversationsClassification":72.6,"TweetSentimentExtractionClassification":63.71} -{"index":149,"Rank":88,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":73.12,"AmazonCounterfactualClassification (en)":85.09,"AmazonPolarityClassification":86.54,"AmazonReviewsClassification (en)":42.96,"Banking77Classification":82.66,"EmotionClassification":53.24,"ImdbClassification":79.79,"MassiveIntentClassification (en)":71.48,"MassiveScenarioClassification (en)":76.47,"MTOPDomainClassification (en)":95.07,"MTOPIntentClassification (en)":72.26,"ToxicConversationsClassification":70.33,"TweetSentimentExtractionClassification":61.58} -{"index":159,"Rank":89,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":73.02,"AmazonCounterfactualClassification (en)":78.97,"AmazonPolarityClassification":90.64,"AmazonReviewsClassification (en)":44.55,"Banking77Classification":82.74,"EmotionClassification":45.18,"ImdbClassification":85.46,"MassiveIntentClassification (en)":72.11,"MassiveScenarioClassification (en)":77.08,"MTOPDomainClassification (en)":93.13,"MTOPIntentClassification (en)":75.27,"ToxicConversationsClassification":69.78,"TweetSentimentExtractionClassification":61.28} -{"index":252,"Rank":90,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.01,"AmazonCounterfactualClassification (en)":74.18,"AmazonPolarityClassification":91.77,"AmazonReviewsClassification (en)":48.96,"Banking77Classification":85.07,"EmotionClassification":48.65,"ImdbClassification":85.95,"MassiveIntentClassification (en)":71.47,"MassiveScenarioClassification (en)":76.38,"MTOPDomainClassification (en)":93.03,"MTOPIntentClassification (en)":72.04,"ToxicConversationsClassification":71.61,"TweetSentimentExtractionClassification":57.01} -{"index":158,"Rank":91,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.94,"AmazonCounterfactualClassification (en)":77.6,"AmazonPolarityClassification":91.27,"AmazonReviewsClassification (en)":45.88,"Banking77Classification":81.64,"EmotionClassification":47.06,"ImdbClassification":86.0,"MassiveIntentClassification (en)":71.62,"MassiveScenarioClassification (en)":76.36,"MTOPDomainClassification (en)":92.7,"MTOPIntentClassification (en)":72.56,"ToxicConversationsClassification":71.1,"TweetSentimentExtractionClassification":61.46} -{"index":245,"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":72.84,"AmazonCounterfactualClassification (en)":76.01,"AmazonPolarityClassification":93.17,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":80.88,"EmotionClassification":51.95,"ImdbClassification":87.54,"MassiveIntentClassification (en)":72.09,"MassiveScenarioClassification 
(en)":73.26,"MTOPDomainClassification (en)":90.73,"MTOPIntentClassification (en)":68.15,"ToxicConversationsClassification":70.95,"TweetSentimentExtractionClassification":61.21} -{"index":116,"Rank":93,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.72,"AmazonCounterfactualClassification (en)":72.9,"AmazonPolarityClassification":87.19,"AmazonReviewsClassification (en)":42.56,"Banking77Classification":84.24,"EmotionClassification":52.06,"ImdbClassification":78.54,"MassiveIntentClassification (en)":73.18,"MassiveScenarioClassification (en)":76.68,"MTOPDomainClassification (en)":94.78,"MTOPIntentClassification (en)":77.72,"ToxicConversationsClassification":72.9,"TweetSentimentExtractionClassification":59.85} -{"index":152,"Rank":94,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.63,"AmazonCounterfactualClassification (en)":79.72,"AmazonPolarityClassification":87.96,"AmazonReviewsClassification (en)":42.65,"Banking77Classification":83.33,"EmotionClassification":49.44,"ImdbClassification":75.96,"MassiveIntentClassification (en)":72.25,"MassiveScenarioClassification (en)":76.76,"MTOPDomainClassification (en)":93.21,"MTOPIntentClassification (en)":74.77,"ToxicConversationsClassification":74.12,"TweetSentimentExtractionClassification":61.38} -{"index":199,"Rank":95,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.62,"AmazonCounterfactualClassification (en)":75.39,"AmazonPolarityClassification":90.73,"AmazonReviewsClassification (en)":45.49,"Banking77Classification":84.3,"EmotionClassification":46.7,"ImdbClassification":83.15,"MassiveIntentClassification (en)":73.18,"MassiveScenarioClassification (en)":77.54,"MTOPDomainClassification (en)":92.05,"MTOPIntentClassification (en)":72.49,"ToxicConversationsClassification":70.92,"TweetSentimentExtractionClassification":59.49} -{"index":147,"Rank":96,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.36,"AmazonCounterfactualClassification (en)":86.21,"AmazonPolarityClassification":88.36,"AmazonReviewsClassification (en)":44.64,"Banking77Classification":77.04,"EmotionClassification":51.76,"ImdbClassification":81.17,"MassiveIntentClassification (en)":67.48,"MassiveScenarioClassification (en)":72.59,"MTOPDomainClassification (en)":93.72,"MTOPIntentClassification (en)":70.26,"ToxicConversationsClassification":71.82,"TweetSentimentExtractionClassification":63.31} -{"index":254,"Rank":97,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.31,"AmazonCounterfactualClassification (en)":73.22,"AmazonPolarityClassification":91.82,"AmazonReviewsClassification (en)":48.03,"Banking77Classification":84.08,"EmotionClassification":46.56,"ImdbClassification":86.8,"MassiveIntentClassification (en)":70.35,"MassiveScenarioClassification (en)":75.64,"MTOPDomainClassification (en)":93.05,"MTOPIntentClassification (en)":69.65,"ToxicConversationsClassification":70.33,"TweetSentimentExtractionClassification":58.22} -{"index":244,"Rank":98,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":72.31,"AmazonCounterfactualClassification (en)":75.51,"AmazonPolarityClassification":92.87,"AmazonReviewsClassification 
(en)":47.12,"Banking77Classification":78.46,"EmotionClassification":51.74,"ImdbClassification":87.01,"MassiveIntentClassification (en)":71.78,"MassiveScenarioClassification (en)":73.16,"MTOPDomainClassification (en)":90.99,"MTOPIntentClassification (en)":64.98,"ToxicConversationsClassification":71.73,"TweetSentimentExtractionClassification":62.33} -{"index":185,"Rank":99,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.25,"AmazonCounterfactualClassification (en)":66.94,"AmazonPolarityClassification":94.7,"AmazonReviewsClassification (en)":51.59,"Banking77Classification":80.34,"EmotionClassification":47.83,"ImdbClassification":89.66,"MassiveIntentClassification (en)":70.27,"MassiveScenarioClassification (en)":74.51,"MTOPDomainClassification (en)":93.77,"MTOPIntentClassification (en)":67.8,"ToxicConversationsClassification":70.85,"TweetSentimentExtractionClassification":58.71} -{"index":66,"Rank":100,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":72.21,"AmazonCounterfactualClassification (en)":77.42,"AmazonPolarityClassification":82.05,"AmazonReviewsClassification (en)":40.81,"Banking77Classification":86.01,"EmotionClassification":48.38,"ImdbClassification":75.33,"MassiveIntentClassification (en)":75.58,"MassiveScenarioClassification (en)":79.16,"MTOPDomainClassification (en)":94.09,"MTOPIntentClassification (en)":77.05,"ToxicConversationsClassification":69.92,"TweetSentimentExtractionClassification":60.76} -{"index":169,"Rank":101,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.13,"AmazonCounterfactualClassification (en)":73.12,"AmazonPolarityClassification":88.89,"AmazonReviewsClassification (en)":43.2,"Banking77Classification":85.36,"EmotionClassification":48.77,"ImdbClassification":78.46,"MassiveIntentClassification (en)":72.04,"MassiveScenarioClassification (en)":76.86,"MTOPDomainClassification (en)":93.43,"MTOPIntentClassification (en)":71.73,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":62.14} -{"index":210,"Rank":102,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":72.1,"AmazonCounterfactualClassification (en)":72.94,"AmazonPolarityClassification":91.35,"AmazonReviewsClassification (en)":45.73,"Banking77Classification":83.69,"EmotionClassification":45.88,"ImdbClassification":83.99,"MassiveIntentClassification (en)":71.76,"MassiveScenarioClassification (en)":75.67,"MTOPDomainClassification (en)":91.68,"MTOPIntentClassification (en)":72.47,"ToxicConversationsClassification":70.87,"TweetSentimentExtractionClassification":59.2} -{"index":282,"Rank":103,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.97,"AmazonCounterfactualClassification (en)":73.96,"AmazonPolarityClassification":91.32,"AmazonReviewsClassification (en)":46.03,"Banking77Classification":83.19,"EmotionClassification":45.8,"ImdbClassification":85.93,"MassiveIntentClassification (en)":71.12,"MassiveScenarioClassification (en)":75.56,"MTOPDomainClassification (en)":92.76,"MTOPIntentClassification (en)":70.45,"ToxicConversationsClassification":68.52,"TweetSentimentExtractionClassification":58.98} -{"index":167,"Rank":104,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":71.91,"AmazonCounterfactualClassification (en)":78.9,"AmazonPolarityClassification":87.98,"AmazonReviewsClassification (en)":42.58,"Banking77Classification":84.16,"EmotionClassification":47.88,"ImdbClassification":77.7,"MassiveIntentClassification (en)":70.39,"MassiveScenarioClassification (en)":77.19,"MTOPDomainClassification (en)":92.34,"MTOPIntentClassification (en)":70.63,"ToxicConversationsClassification":72.13,"TweetSentimentExtractionClassification":61.04} -{"index":63,"Rank":105,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":71.88,"AmazonCounterfactualClassification (en)":75.7,"AmazonPolarityClassification":80.68,"AmazonReviewsClassification (en)":40.0,"Banking77Classification":84.77,"EmotionClassification":47.08,"ImdbClassification":75.19,"MassiveIntentClassification (en)":75.01,"MassiveScenarioClassification (en)":79.16,"MTOPDomainClassification (en)":94.47,"MTOPIntentClassification (en)":81.09,"ToxicConversationsClassification":71.85,"TweetSentimentExtractionClassification":57.61} -{"index":270,"Rank":106,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.7,"AmazonCounterfactualClassification (en)":72.88,"AmazonPolarityClassification":91.03,"AmazonReviewsClassification (en)":46.94,"Banking77Classification":83.47,"EmotionClassification":45.8,"ImdbClassification":85.01,"MassiveIntentClassification (en)":70.23,"MassiveScenarioClassification (en)":75.29,"MTOPDomainClassification (en)":92.51,"MTOPIntentClassification (en)":69.62,"ToxicConversationsClassification":69.8,"TweetSentimentExtractionClassification":57.82} -{"index":157,"Rank":107,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":71.67,"AmazonCounterfactualClassification (en)":76.22,"AmazonPolarityClassification":87.53,"AmazonReviewsClassification (en)":42.61,"Banking77Classification":81.87,"EmotionClassification":46.86,"ImdbClassification":75.55,"MassiveIntentClassification (en)":72.22,"MassiveScenarioClassification (en)":75.78,"MTOPDomainClassification (en)":92.05,"MTOPIntentClassification (en)":73.24,"ToxicConversationsClassification":72.76,"TweetSentimentExtractionClassification":63.31} -{"index":61,"Rank":108,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":71.57,"AmazonCounterfactualClassification (en)":76.91,"AmazonPolarityClassification":79.05,"AmazonReviewsClassification (en)":40.08,"Banking77Classification":84.65,"EmotionClassification":46.58,"ImdbClassification":75.68,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":79.17,"MTOPDomainClassification (en)":94.33,"MTOPIntentClassification (en)":79.54,"ToxicConversationsClassification":71.81,"TweetSentimentExtractionClassification":57.17} -{"index":208,"Rank":109,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.17,"AmazonCounterfactualClassification (en)":76.99,"AmazonPolarityClassification":87.54,"AmazonReviewsClassification (en)":46.81,"Banking77Classification":81.09,"EmotionClassification":47.65,"ImdbClassification":86.32,"MassiveIntentClassification (en)":67.75,"MassiveScenarioClassification (en)":74.03,"MTOPDomainClassification (en)":92.36,"MTOPIntentClassification (en)":65.2,"ToxicConversationsClassification":72.96,"TweetSentimentExtractionClassification":55.28} 
-{"index":281,"Rank":110,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.93,"AmazonCounterfactualClassification (en)":75.94,"AmazonPolarityClassification":86.72,"AmazonReviewsClassification (en)":44.78,"Banking77Classification":80.66,"EmotionClassification":48.74,"ImdbClassification":77.98,"MassiveIntentClassification (en)":70.15,"MassiveScenarioClassification (en)":75.33,"MTOPDomainClassification (en)":92.13,"MTOPIntentClassification (en)":64.68,"ToxicConversationsClassification":72.29,"TweetSentimentExtractionClassification":61.81} -{"index":162,"Rank":111,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.74,"AmazonCounterfactualClassification (en)":73.79,"AmazonPolarityClassification":88.7,"AmazonReviewsClassification (en)":44.7,"Banking77Classification":79.42,"EmotionClassification":42.45,"ImdbClassification":80.82,"MassiveIntentClassification (en)":70.3,"MassiveScenarioClassification (en)":74.48,"MTOPDomainClassification (en)":91.07,"MTOPIntentClassification (en)":71.08,"ToxicConversationsClassification":69.39,"TweetSentimentExtractionClassification":62.62} -{"index":112,"Rank":112,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.62,"AmazonCounterfactualClassification (en)":79.31,"AmazonPolarityClassification":76.66,"AmazonReviewsClassification (en)":35.28,"Banking77Classification":84.31,"EmotionClassification":55.61,"ImdbClassification":82.39,"MassiveIntentClassification (en)":69.24,"MassiveScenarioClassification (en)":74.11,"MTOPDomainClassification (en)":90.46,"MTOPIntentClassification (en)":66.0,"ToxicConversationsClassification":74.52,"TweetSentimentExtractionClassification":59.59} -{"index":35,"Rank":113,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.57,"AmazonCounterfactualClassification (en)":70.03,"AmazonPolarityClassification":90.7,"AmazonReviewsClassification (en)":46.55,"Banking77Classification":78.12,"EmotionClassification":46.66,"ImdbClassification":85.64,"MassiveIntentClassification (en)":67.42,"MassiveScenarioClassification (en)":72.61,"MTOPDomainClassification (en)":91.86,"MTOPIntentClassification (en)":62.18,"ToxicConversationsClassification":70.66,"TweetSentimentExtractionClassification":64.46} -{"index":184,"Rank":114,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.55,"AmazonCounterfactualClassification (en)":68.61,"AmazonPolarityClassification":93.38,"AmazonReviewsClassification (en)":50.64,"Banking77Classification":78.5,"EmotionClassification":46.37,"ImdbClassification":88.54,"MassiveIntentClassification (en)":67.24,"MassiveScenarioClassification (en)":72.98,"MTOPDomainClassification (en)":90.48,"MTOPIntentClassification (en)":59.82,"ToxicConversationsClassification":71.16,"TweetSentimentExtractionClassification":58.89} -{"index":277,"Rank":115,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.44,"AmazonCounterfactualClassification (en)":76.4,"AmazonPolarityClassification":92.83,"AmazonReviewsClassification (en)":47.45,"Banking77Classification":68.04,"EmotionClassification":50.33,"ImdbClassification":89.38,"MassiveIntentClassification (en)":65.17,"MassiveScenarioClassification (en)":67.67,"MTOPDomainClassification 
(en)":89.89,"MTOPIntentClassification (en)":64.8,"ToxicConversationsClassification":70.0,"TweetSentimentExtractionClassification":63.35} -{"index":107,"Rank":116,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.35,"AmazonCounterfactualClassification (en)":71.76,"AmazonPolarityClassification":86.61,"AmazonReviewsClassification (en)":42.61,"Banking77Classification":81.73,"EmotionClassification":44.71,"ImdbClassification":80.54,"MassiveIntentClassification (en)":70.13,"MassiveScenarioClassification (en)":74.86,"MTOPDomainClassification (en)":91.76,"MTOPIntentClassification (en)":69.9,"ToxicConversationsClassification":70.97,"TweetSentimentExtractionClassification":58.57} -{"index":69,"Rank":117,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.25,"AmazonCounterfactualClassification (en)":72.1,"AmazonPolarityClassification":86.69,"AmazonReviewsClassification (en)":42.7,"Banking77Classification":81.92,"EmotionClassification":45.44,"ImdbClassification":80.8,"MassiveIntentClassification (en)":70.35,"MassiveScenarioClassification (en)":74.89,"MTOPDomainClassification (en)":92.12,"MTOPIntentClassification (en)":71.86,"ToxicConversationsClassification":65.46,"TweetSentimentExtractionClassification":58.66} -{"index":42,"Rank":118,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":70.21,"AmazonCounterfactualClassification (en)":71.1,"AmazonPolarityClassification":86.69,"AmazonReviewsClassification (en)":45.51,"Banking77Classification":79.36,"EmotionClassification":48.79,"ImdbClassification":82.25,"MassiveIntentClassification (en)":71.52,"MassiveScenarioClassification (en)":73.87,"MTOPDomainClassification (en)":92.67,"MTOPIntentClassification (en)":69.77,"ToxicConversationsClassification":63.9,"TweetSentimentExtractionClassification":57.14} -{"index":166,"Rank":119,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.17,"AmazonCounterfactualClassification (en)":69.94,"AmazonPolarityClassification":87.19,"AmazonReviewsClassification (en)":41.08,"Banking77Classification":82.89,"EmotionClassification":46.84,"ImdbClassification":74.45,"MassiveIntentClassification (en)":67.93,"MassiveScenarioClassification (en)":75.72,"MTOPDomainClassification (en)":92.18,"MTOPIntentClassification (en)":70.3,"ToxicConversationsClassification":72.01,"TweetSentimentExtractionClassification":61.51} -{"index":84,"Rank":120,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.14,"AmazonCounterfactualClassification (en)":74.07,"AmazonPolarityClassification":82.31,"AmazonReviewsClassification (en)":41.58,"Banking77Classification":81.74,"EmotionClassification":49.92,"ImdbClassification":74.33,"MassiveIntentClassification (en)":70.0,"MassiveScenarioClassification (en)":75.03,"MTOPDomainClassification (en)":89.64,"MTOPIntentClassification (en)":70.68,"ToxicConversationsClassification":69.93,"TweetSentimentExtractionClassification":62.44} -{"index":243,"Rank":121,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":69.81,"AmazonCounterfactualClassification (en)":75.82,"AmazonPolarityClassification":85.12,"AmazonReviewsClassification (en)":44.94,"Banking77Classification":76.48,"EmotionClassification":51.35,"ImdbClassification":77.34,"MassiveIntentClassification 
(en)":69.74,"MassiveScenarioClassification (en)":72.32,"MTOPDomainClassification (en)":90.34,"MTOPIntentClassification (en)":63.32,"ToxicConversationsClassification":68.2,"TweetSentimentExtractionClassification":62.71} -{"index":209,"Rank":122,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":69.7,"AmazonCounterfactualClassification (en)":69.78,"AmazonPolarityClassification":88.74,"AmazonReviewsClassification (en)":43.11,"Banking77Classification":82.78,"EmotionClassification":42.92,"ImdbClassification":80.87,"MassiveIntentClassification (en)":69.34,"MassiveScenarioClassification (en)":74.21,"MTOPDomainClassification (en)":89.61,"MTOPIntentClassification (en)":68.9,"ToxicConversationsClassification":68.16,"TweetSentimentExtractionClassification":57.99} -{"index":183,"Rank":123,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.65,"AmazonCounterfactualClassification (en)":70.12,"AmazonPolarityClassification":92.95,"AmazonReviewsClassification (en)":50.52,"Banking77Classification":75.59,"EmotionClassification":45.98,"ImdbClassification":90.22,"MassiveIntentClassification (en)":65.03,"MassiveScenarioClassification (en)":71.05,"MTOPDomainClassification (en)":88.63,"MTOPIntentClassification (en)":58.08,"ToxicConversationsClassification":69.33,"TweetSentimentExtractionClassification":58.25} -{"index":176,"Rank":124,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.02,"AmazonCounterfactualClassification (en)":74.25,"AmazonPolarityClassification":78.31,"AmazonReviewsClassification (en)":38.32,"Banking77Classification":85.26,"EmotionClassification":46.58,"ImdbClassification":67.46,"MassiveIntentClassification (en)":70.76,"MassiveScenarioClassification (en)":73.82,"MTOPDomainClassification (en)":90.37,"MTOPIntentClassification (en)":71.97,"ToxicConversationsClassification":70.05,"TweetSentimentExtractionClassification":61.06} -{"index":177,"Rank":125,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.82,"AmazonCounterfactualClassification (en)":71.36,"AmazonPolarityClassification":82.9,"AmazonReviewsClassification (en)":40.89,"Banking77Classification":78.25,"EmotionClassification":44.01,"ImdbClassification":73.64,"MassiveIntentClassification (en)":67.61,"MassiveScenarioClassification (en)":69.75,"MTOPDomainClassification (en)":93.96,"MTOPIntentClassification (en)":72.5,"ToxicConversationsClassification":71.54,"TweetSentimentExtractionClassification":59.4} -{"index":136,"Rank":126,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.82,"AmazonCounterfactualClassification (en)":71.36,"AmazonPolarityClassification":82.9,"AmazonReviewsClassification (en)":40.89,"Banking77Classification":78.25,"EmotionClassification":44.01,"ImdbClassification":73.64,"MassiveIntentClassification (en)":67.61,"MassiveScenarioClassification (en)":69.75,"MTOPDomainClassification (en)":93.96,"MTOPIntentClassification (en)":72.5,"ToxicConversationsClassification":71.54,"TweetSentimentExtractionClassification":59.4} -{"index":203,"Rank":127,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.55,"AmazonCounterfactualClassification (en)":70.72,"AmazonPolarityClassification":83.34,"AmazonReviewsClassification 
(en)":40.99,"Banking77Classification":81.25,"EmotionClassification":41.66,"ImdbClassification":74.81,"MassiveIntentClassification (en)":69.8,"MassiveScenarioClassification (en)":74.54,"MTOPDomainClassification (en)":91.22,"MTOPIntentClassification (en)":69.39,"ToxicConversationsClassification":67.44,"TweetSentimentExtractionClassification":57.48} -{"index":100,"Rank":128,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.23,"AmazonCounterfactualClassification (en)":76.81,"AmazonPolarityClassification":82.83,"AmazonReviewsClassification (en)":38.93,"Banking77Classification":80.34,"EmotionClassification":46.54,"ImdbClassification":74.08,"MassiveIntentClassification (en)":66.92,"MassiveScenarioClassification (en)":72.75,"MTOPDomainClassification (en)":92.73,"MTOPIntentClassification (en)":65.18,"ToxicConversationsClassification":64.93,"TweetSentimentExtractionClassification":56.73} -{"index":83,"Rank":129,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.13,"AmazonCounterfactualClassification (en)":69.22,"AmazonPolarityClassification":71.26,"AmazonReviewsClassification (en)":39.19,"Banking77Classification":84.49,"EmotionClassification":49.66,"ImdbClassification":66.64,"MassiveIntentClassification (en)":70.39,"MassiveScenarioClassification (en)":76.28,"MTOPDomainClassification (en)":93.47,"MTOPIntentClassification (en)":72.42,"ToxicConversationsClassification":67.71,"TweetSentimentExtractionClassification":56.85} -{"index":76,"Rank":130,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.13,"AmazonCounterfactualClassification (en)":71.84,"AmazonPolarityClassification":80.04,"AmazonReviewsClassification (en)":39.75,"Banking77Classification":80.92,"EmotionClassification":44.88,"ImdbClassification":71.96,"MassiveIntentClassification (en)":69.11,"MassiveScenarioClassification (en)":74.16,"MTOPDomainClassification (en)":90.87,"MTOPIntentClassification (en)":68.53,"ToxicConversationsClassification":66.04,"TweetSentimentExtractionClassification":59.43} -{"index":168,"Rank":131,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.04,"AmazonCounterfactualClassification (en)":72.36,"AmazonPolarityClassification":83.81,"AmazonReviewsClassification (en)":38.97,"Banking77Classification":80.97,"EmotionClassification":44.77,"ImdbClassification":72.21,"MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":73.26,"MTOPDomainClassification (en)":89.93,"MTOPIntentClassification (en)":66.56,"ToxicConversationsClassification":67.96,"TweetSentimentExtractionClassification":59.6} -{"index":106,"Rank":132,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.04,"AmazonCounterfactualClassification (en)":67.76,"AmazonPolarityClassification":79.75,"AmazonReviewsClassification (en)":37.45,"Banking77Classification":81.17,"EmotionClassification":44.53,"ImdbClassification":76.46,"MassiveIntentClassification (en)":68.58,"MassiveScenarioClassification (en)":73.92,"MTOPDomainClassification (en)":90.67,"MTOPIntentClassification (en)":68.29,"ToxicConversationsClassification":70.23,"TweetSentimentExtractionClassification":57.64} -{"index":101,"Rank":133,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":68.01,"AmazonCounterfactualClassification (en)":78.48,"AmazonPolarityClassification":78.74,"AmazonReviewsClassification (en)":39.93,"Banking77Classification":80.26,"EmotionClassification":47.26,"ImdbClassification":71.79,"MassiveIntentClassification (en)":66.55,"MassiveScenarioClassification (en)":73.11,"MTOPDomainClassification (en)":93.04,"MTOPIntentClassification (en)":63.52,"ToxicConversationsClassification":66.74,"TweetSentimentExtractionClassification":56.73} -{"index":174,"Rank":134,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.98,"AmazonCounterfactualClassification (en)":73.76,"AmazonPolarityClassification":77.52,"AmazonReviewsClassification (en)":38.5,"Banking77Classification":83.94,"EmotionClassification":44.64,"ImdbClassification":68.61,"MassiveIntentClassification (en)":69.11,"MassiveScenarioClassification (en)":73.74,"MTOPDomainClassification (en)":89.38,"MTOPIntentClassification (en)":66.49,"ToxicConversationsClassification":69.65,"TweetSentimentExtractionClassification":60.43} -{"index":258,"Rank":135,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.9,"AmazonCounterfactualClassification (en)":76.19,"AmazonPolarityClassification":69.63,"AmazonReviewsClassification (en)":35.53,"Banking77Classification":78.13,"EmotionClassification":45.48,"ImdbClassification":64.06,"MassiveIntentClassification (en)":71.69,"MassiveScenarioClassification (en)":77.2,"MTOPDomainClassification (en)":92.94,"MTOPIntentClassification (en)":74.43,"ToxicConversationsClassification":70.17,"TweetSentimentExtractionClassification":59.31} -{"index":68,"Rank":136,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.9,"AmazonCounterfactualClassification (en)":71.87,"AmazonPolarityClassification":78.79,"AmazonReviewsClassification (en)":39.31,"Banking77Classification":80.58,"EmotionClassification":44.79,"ImdbClassification":71.53,"MassiveIntentClassification (en)":68.84,"MassiveScenarioClassification (en)":73.75,"MTOPDomainClassification (en)":90.79,"MTOPIntentClassification (en)":68.72,"ToxicConversationsClassification":66.29,"TweetSentimentExtractionClassification":59.49} -{"index":113,"Rank":137,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.8,"AmazonCounterfactualClassification (en)":68.75,"AmazonPolarityClassification":81.3,"AmazonReviewsClassification (en)":38.57,"Banking77Classification":79.98,"EmotionClassification":40.09,"ImdbClassification":80.09,"MassiveIntentClassification (en)":67.58,"MassiveScenarioClassification (en)":73.47,"MTOPDomainClassification (en)":90.36,"MTOPIntentClassification (en)":66.45,"ToxicConversationsClassification":69.34,"TweetSentimentExtractionClassification":57.6} -{"index":71,"Rank":138,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.77,"AmazonCounterfactualClassification (en)":73.18,"AmazonPolarityClassification":79.99,"AmazonReviewsClassification (en)":39.66,"Banking77Classification":77.95,"EmotionClassification":44.38,"ImdbClassification":73.02,"MassiveIntentClassification (en)":67.75,"MassiveScenarioClassification (en)":72.36,"MTOPDomainClassification (en)":89.87,"MTOPIntentClassification (en)":71.03,"ToxicConversationsClassification":64.41,"TweetSentimentExtractionClassification":59.66} 
-{"index":172,"Rank":139,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.76,"AmazonCounterfactualClassification (en)":68.93,"AmazonPolarityClassification":69.14,"AmazonReviewsClassification (en)":31.38,"Banking77Classification":85.34,"EmotionClassification":45.84,"ImdbClassification":66.42,"MassiveIntentClassification (en)":72.73,"MassiveScenarioClassification (en)":77.08,"MTOPDomainClassification (en)":92.83,"MTOPIntentClassification (en)":76.09,"ToxicConversationsClassification":69.13,"TweetSentimentExtractionClassification":58.16} -{"index":67,"Rank":140,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":67.67,"AmazonCounterfactualClassification (en)":72.93,"AmazonPolarityClassification":74.28,"AmazonReviewsClassification (en)":36.14,"Banking77Classification":79.0,"EmotionClassification":42.85,"ImdbClassification":71.92,"MassiveIntentClassification (en)":69.99,"MassiveScenarioClassification (en)":75.15,"MTOPDomainClassification (en)":91.24,"MTOPIntentClassification (en)":74.08,"ToxicConversationsClassification":68.4,"TweetSentimentExtractionClassification":56.08} -{"index":75,"Rank":141,"Model":"gte-micro-v3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.47,"AmazonCounterfactualClassification (en)":71.43,"AmazonPolarityClassification":77.72,"AmazonReviewsClassification (en)":38.96,"Banking77Classification":80.4,"EmotionClassification":44.54,"ImdbClassification":70.59,"MassiveIntentClassification (en)":68.5,"MassiveScenarioClassification (en)":73.55,"MTOPDomainClassification (en)":90.5,"MTOPIntentClassification (en)":67.52,"ToxicConversationsClassification":66.69,"TweetSentimentExtractionClassification":59.29} -{"index":74,"Rank":142,"Model":"gte-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.47,"AmazonCounterfactualClassification (en)":71.43,"AmazonPolarityClassification":77.72,"AmazonReviewsClassification (en)":38.96,"Banking77Classification":80.4,"EmotionClassification":44.54,"ImdbClassification":70.59,"MassiveIntentClassification (en)":68.5,"MassiveScenarioClassification (en)":73.55,"MTOPDomainClassification (en)":90.5,"MTOPIntentClassification (en)":67.52,"ToxicConversationsClassification":66.69,"TweetSentimentExtractionClassification":59.29} -{"index":238,"Rank":143,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":67.41,"AmazonCounterfactualClassification (en)":67.3,"AmazonPolarityClassification":75.05,"AmazonReviewsClassification (en)":37.3,"Banking77Classification":82.32,"EmotionClassification":43.19,"ImdbClassification":70.8,"MassiveIntentClassification (en)":70.61,"MassiveScenarioClassification (en)":77.77,"MTOPDomainClassification (en)":93.84,"MTOPIntentClassification (en)":67.71,"ToxicConversationsClassification":68.48,"TweetSentimentExtractionClassification":54.54} -{"index":217,"Rank":144,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":67.32,"AmazonCounterfactualClassification (en)":75.75,"AmazonPolarityClassification":82.47,"AmazonReviewsClassification (en)":39.6,"Banking77Classification":75.76,"EmotionClassification":44.81,"ImdbClassification":73.53,"MassiveIntentClassification (en)":65.95,"MassiveScenarioClassification (en)":70.78,"MTOPDomainClassification (en)":84.29,"MTOPIntentClassification 
(en)":63.14,"ToxicConversationsClassification":72.04,"TweetSentimentExtractionClassification":59.73} -{"index":44,"Rank":145,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.3,"AmazonCounterfactualClassification (en)":69.73,"AmazonPolarityClassification":86.9,"AmazonReviewsClassification (en)":44.05,"Banking77Classification":75.41,"EmotionClassification":41.99,"ImdbClassification":79.54,"MassiveIntentClassification (en)":63.34,"MassiveScenarioClassification (en)":72.37,"MTOPDomainClassification (en)":89.67,"MTOPIntentClassification (en)":60.28,"ToxicConversationsClassification":67.94,"TweetSentimentExtractionClassification":56.41} -{"index":236,"Rank":146,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":67.14,"AmazonCounterfactualClassification (en)":70.03,"AmazonPolarityClassification":73.92,"AmazonReviewsClassification (en)":37.21,"Banking77Classification":81.21,"EmotionClassification":46.33,"ImdbClassification":70.86,"MassiveIntentClassification (en)":70.06,"MassiveScenarioClassification (en)":75.49,"MTOPDomainClassification (en)":94.01,"MTOPIntentClassification (en)":63.86,"ToxicConversationsClassification":68.65,"TweetSentimentExtractionClassification":54.09} -{"index":82,"Rank":147,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.13,"AmazonCounterfactualClassification (en)":67.57,"AmazonPolarityClassification":71.44,"AmazonReviewsClassification (en)":35.75,"Banking77Classification":83.22,"EmotionClassification":49.21,"ImdbClassification":63.53,"MassiveIntentClassification (en)":69.01,"MassiveScenarioClassification (en)":75.9,"MTOPDomainClassification (en)":92.56,"MTOPIntentClassification (en)":71.85,"ToxicConversationsClassification":68.84,"TweetSentimentExtractionClassification":56.69} -{"index":237,"Rank":148,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":67.11,"AmazonCounterfactualClassification (en)":68.6,"AmazonPolarityClassification":74.58,"AmazonReviewsClassification (en)":38.2,"Banking77Classification":82.22,"EmotionClassification":45.54,"ImdbClassification":68.15,"MassiveIntentClassification (en)":70.23,"MassiveScenarioClassification (en)":75.94,"MTOPDomainClassification (en)":93.6,"MTOPIntentClassification (en)":65.93,"ToxicConversationsClassification":67.56,"TweetSentimentExtractionClassification":54.77} -{"index":99,"Rank":149,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.07,"AmazonCounterfactualClassification (en)":74.81,"AmazonPolarityClassification":78.4,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":80.06,"EmotionClassification":46.46,"ImdbClassification":72.88,"MassiveIntentClassification (en)":65.79,"MassiveScenarioClassification (en)":71.1,"MTOPDomainClassification (en)":92.62,"MTOPIntentClassification (en)":64.51,"ToxicConversationsClassification":64.71,"TweetSentimentExtractionClassification":56.74} -{"index":121,"Rank":150,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.07,"AmazonCounterfactualClassification (en)":74.81,"AmazonPolarityClassification":78.4,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":80.06,"EmotionClassification":46.46,"ImdbClassification":72.88,"MassiveIntentClassification 
(en)":65.79,"MassiveScenarioClassification (en)":71.1,"MTOPDomainClassification (en)":92.62,"MTOPIntentClassification (en)":64.51,"ToxicConversationsClassification":64.71,"TweetSentimentExtractionClassification":56.74} -{"index":132,"Rank":151,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.95,"AmazonCounterfactualClassification (en)":67.06,"AmazonPolarityClassification":70.41,"AmazonReviewsClassification (en)":33.21,"Banking77Classification":82.73,"EmotionClassification":43.47,"ImdbClassification":67.3,"MassiveIntentClassification (en)":71.77,"MassiveScenarioClassification (en)":77.85,"MTOPDomainClassification (en)":91.82,"MTOPIntentClassification (en)":74.7,"ToxicConversationsClassification":67.48,"TweetSentimentExtractionClassification":55.62} -{"index":214,"Rank":152,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":66.68,"AmazonCounterfactualClassification (en)":72.19,"AmazonPolarityClassification":68.63,"AmazonReviewsClassification (en)":37.42,"Banking77Classification":80.02,"EmotionClassification":44.77,"ImdbClassification":67.04,"MassiveIntentClassification (en)":67.78,"MassiveScenarioClassification (en)":76.0,"MTOPDomainClassification (en)":93.18,"MTOPIntentClassification (en)":69.31,"ToxicConversationsClassification":67.77,"TweetSentimentExtractionClassification":56.1} -{"index":79,"Rank":153,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.52,"AmazonCounterfactualClassification (en)":65.21,"AmazonPolarityClassification":73.21,"AmazonReviewsClassification (en)":34.96,"Banking77Classification":82.06,"EmotionClassification":46.39,"ImdbClassification":64.05,"MassiveIntentClassification (en)":68.65,"MassiveScenarioClassification (en)":76.04,"MTOPDomainClassification (en)":92.08,"MTOPIntentClassification (en)":71.19,"ToxicConversationsClassification":68.73,"TweetSentimentExtractionClassification":55.67} -{"index":212,"Rank":154,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":66.48,"AmazonCounterfactualClassification (en)":66.85,"AmazonPolarityClassification":85.92,"AmazonReviewsClassification (en)":41.02,"Banking77Classification":80.63,"EmotionClassification":40.55,"ImdbClassification":76.6,"MassiveIntentClassification (en)":64.95,"MassiveScenarioClassification (en)":70.38,"MTOPDomainClassification (en)":86.31,"MTOPIntentClassification (en)":62.77,"ToxicConversationsClassification":66.53,"TweetSentimentExtractionClassification":55.23} -{"index":105,"Rank":155,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.35,"AmazonCounterfactualClassification (en)":66.27,"AmazonPolarityClassification":75.37,"AmazonReviewsClassification (en)":35.81,"Banking77Classification":80.58,"EmotionClassification":42.47,"ImdbClassification":70.7,"MassiveIntentClassification (en)":67.78,"MassiveScenarioClassification (en)":73.04,"MTOPDomainClassification (en)":90.25,"MTOPIntentClassification (en)":67.95,"ToxicConversationsClassification":69.21,"TweetSentimentExtractionClassification":56.71} -{"index":123,"Rank":156,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.19,"AmazonCounterfactualClassification (en)":68.06,"AmazonPolarityClassification":68.97,"AmazonReviewsClassification 
(en)":33.86,"Banking77Classification":84.33,"EmotionClassification":44.87,"ImdbClassification":61.77,"MassiveIntentClassification (en)":69.67,"MassiveScenarioClassification (en)":75.34,"MTOPDomainClassification (en)":93.68,"MTOPIntentClassification (en)":71.34,"ToxicConversationsClassification":66.55,"TweetSentimentExtractionClassification":55.85} -{"index":171,"Rank":157,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.07,"AmazonCounterfactualClassification (en)":66.73,"AmazonPolarityClassification":67.61,"AmazonReviewsClassification (en)":31.18,"Banking77Classification":84.06,"EmotionClassification":44.68,"ImdbClassification":63.87,"MassiveIntentClassification (en)":71.09,"MassiveScenarioClassification (en)":76.25,"MTOPDomainClassification (en)":91.53,"MTOPIntentClassification (en)":72.83,"ToxicConversationsClassification":66.15,"TweetSentimentExtractionClassification":56.85} -{"index":251,"Rank":158,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.01,"AmazonCounterfactualClassification (en)":77.94,"AmazonPolarityClassification":76.0,"AmazonReviewsClassification (en)":37.18,"Banking77Classification":75.5,"EmotionClassification":45.21,"ImdbClassification":68.85,"MassiveIntentClassification (en)":66.32,"MassiveScenarioClassification (en)":70.62,"MTOPDomainClassification (en)":85.38,"MTOPIntentClassification (en)":64.51,"ToxicConversationsClassification":67.08,"TweetSentimentExtractionClassification":57.57} -{"index":285,"Rank":159,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":65.94,"AmazonCounterfactualClassification (en)":74.16,"AmazonPolarityClassification":61.91,"AmazonReviewsClassification (en)":32.06,"Banking77Classification":82.05,"EmotionClassification":46.65,"ImdbClassification":65.02,"MassiveIntentClassification (en)":68.48,"MassiveScenarioClassification (en)":74.98,"MTOPDomainClassification (en)":93.17,"MTOPIntentClassification (en)":71.1,"ToxicConversationsClassification":68.15,"TweetSentimentExtractionClassification":53.57} -{"index":260,"Rank":160,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.78,"AmazonCounterfactualClassification (en)":70.81,"AmazonPolarityClassification":67.05,"AmazonReviewsClassification (en)":35.85,"Banking77Classification":74.67,"EmotionClassification":42.31,"ImdbClassification":63.69,"MassiveIntentClassification (en)":69.05,"MassiveScenarioClassification (en)":75.8,"MTOPDomainClassification (en)":92.48,"MTOPIntentClassification (en)":70.27,"ToxicConversationsClassification":68.22,"TweetSentimentExtractionClassification":59.19} -{"index":72,"Rank":161,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.72,"AmazonCounterfactualClassification (en)":70.4,"AmazonPolarityClassification":82.04,"AmazonReviewsClassification (en)":42.41,"Banking77Classification":71.07,"EmotionClassification":46.78,"ImdbClassification":74.6,"MassiveIntentClassification (en)":62.27,"MassiveScenarioClassification (en)":68.22,"MTOPDomainClassification (en)":88.24,"MTOPIntentClassification (en)":54.94,"ToxicConversationsClassification":65.15,"TweetSentimentExtractionClassification":62.54} -{"index":103,"Rank":162,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":65.65,"AmazonCounterfactualClassification (en)":71.18,"AmazonPolarityClassification":78.75,"AmazonReviewsClassification (en)":38.26,"Banking77Classification":79.12,"EmotionClassification":45.77,"ImdbClassification":69.49,"MassiveIntentClassification (en)":64.76,"MassiveScenarioClassification (en)":70.05,"MTOPDomainClassification (en)":90.94,"MTOPIntentClassification (en)":58.63,"ToxicConversationsClassification":61.96,"TweetSentimentExtractionClassification":58.9} -{"index":235,"Rank":163,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":65.25,"AmazonCounterfactualClassification (en)":69.33,"AmazonPolarityClassification":67.82,"AmazonReviewsClassification (en)":38.48,"Banking77Classification":79.26,"EmotionClassification":42.2,"ImdbClassification":65.99,"MassiveIntentClassification (en)":67.05,"MassiveScenarioClassification (en)":75.4,"MTOPDomainClassification (en)":92.42,"MTOPIntentClassification (en)":62.44,"ToxicConversationsClassification":66.6,"TweetSentimentExtractionClassification":56.02} -{"index":230,"Rank":164,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":65.03,"AmazonCounterfactualClassification (en)":65.03,"AmazonPolarityClassification":67.14,"AmazonReviewsClassification (en)":31.44,"Banking77Classification":81.7,"EmotionClassification":42.22,"ImdbClassification":71.17,"MassiveIntentClassification (en)":69.76,"MassiveScenarioClassification (en)":75.67,"MTOPDomainClassification (en)":91.89,"MTOPIntentClassification (en)":68.27,"ToxicConversationsClassification":61.05,"TweetSentimentExtractionClassification":55.05} -{"index":70,"Rank":165,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.86,"AmazonCounterfactualClassification (en)":69.6,"AmazonPolarityClassification":82.09,"AmazonReviewsClassification (en)":41.94,"Banking77Classification":67.13,"EmotionClassification":45.79,"ImdbClassification":74.95,"MassiveIntentClassification (en)":61.52,"MassiveScenarioClassification (en)":67.04,"MTOPDomainClassification (en)":87.27,"MTOPIntentClassification (en)":54.66,"ToxicConversationsClassification":64.47,"TweetSentimentExtractionClassification":61.8} -{"index":59,"Rank":166,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.85,"AmazonCounterfactualClassification (en)":79.06,"AmazonPolarityClassification":70.19,"AmazonReviewsClassification (en)":34.29,"Banking77Classification":75.89,"EmotionClassification":40.26,"ImdbClassification":61.14,"MassiveIntentClassification (en)":65.6,"MassiveScenarioClassification (en)":70.37,"MTOPDomainClassification (en)":87.22,"MTOPIntentClassification (en)":69.45,"ToxicConversationsClassification":70.26,"TweetSentimentExtractionClassification":54.49} -{"index":239,"Rank":167,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.71,"AmazonCounterfactualClassification (en)":64.06,"AmazonPolarityClassification":66.88,"AmazonReviewsClassification (en)":34.85,"Banking77Classification":82.35,"EmotionClassification":41.91,"ImdbClassification":60.17,"MassiveIntentClassification (en)":70.4,"MassiveScenarioClassification (en)":73.73,"MTOPDomainClassification (en)":91.34,"MTOPIntentClassification (en)":71.07,"ToxicConversationsClassification":64.01,"TweetSentimentExtractionClassification":55.74} 
-{"index":234,"Rank":168,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":64.67,"AmazonCounterfactualClassification (en)":71.81,"AmazonPolarityClassification":68.0,"AmazonReviewsClassification (en)":35.45,"Banking77Classification":71.48,"EmotionClassification":40.04,"ImdbClassification":61.52,"MassiveIntentClassification (en)":66.71,"MassiveScenarioClassification (en)":74.0,"MTOPDomainClassification (en)":91.59,"MTOPIntentClassification (en)":66.4,"ToxicConversationsClassification":69.09,"TweetSentimentExtractionClassification":59.97} -{"index":78,"Rank":169,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.56,"AmazonCounterfactualClassification (en)":69.57,"AmazonPolarityClassification":74.74,"AmazonReviewsClassification (en)":35.36,"Banking77Classification":77.51,"EmotionClassification":39.09,"ImdbClassification":67.87,"MassiveIntentClassification (en)":65.15,"MassiveScenarioClassification (en)":70.75,"MTOPDomainClassification (en)":89.98,"MTOPIntentClassification (en)":66.43,"ToxicConversationsClassification":64.01,"TweetSentimentExtractionClassification":54.26} -{"index":257,"Rank":170,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.45,"AmazonCounterfactualClassification (en)":70.67,"AmazonPolarityClassification":67.73,"AmazonReviewsClassification (en)":32.62,"Banking77Classification":74.12,"EmotionClassification":38.64,"ImdbClassification":68.43,"MassiveIntentClassification (en)":67.23,"MassiveScenarioClassification (en)":72.79,"MTOPDomainClassification (en)":90.12,"MTOPIntentClassification (en)":65.0,"ToxicConversationsClassification":68.99,"TweetSentimentExtractionClassification":57.01} -{"index":192,"Rank":171,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.67,"AmazonCounterfactualClassification (en)":61.99,"AmazonPolarityClassification":64.75,"AmazonReviewsClassification (en)":29.79,"Banking77Classification":82.92,"EmotionClassification":40.56,"ImdbClassification":61.71,"MassiveIntentClassification (en)":68.72,"MassiveScenarioClassification (en)":74.39,"MTOPDomainClassification (en)":91.45,"MTOPIntentClassification (en)":70.03,"ToxicConversationsClassification":65.33,"TweetSentimentExtractionClassification":52.41} -{"index":77,"Rank":172,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.54,"AmazonCounterfactualClassification (en)":66.84,"AmazonPolarityClassification":70.48,"AmazonReviewsClassification (en)":33.65,"Banking77Classification":78.04,"EmotionClassification":39.18,"ImdbClassification":65.14,"MassiveIntentClassification (en)":65.32,"MassiveScenarioClassification (en)":70.51,"MTOPDomainClassification (en)":89.27,"MTOPIntentClassification (en)":65.36,"ToxicConversationsClassification":63.92,"TweetSentimentExtractionClassification":54.8} -{"index":259,"Rank":173,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.42,"AmazonCounterfactualClassification (en)":69.84,"AmazonPolarityClassification":65.24,"AmazonReviewsClassification (en)":33.95,"Banking77Classification":72.79,"EmotionClassification":35.6,"ImdbClassification":66.32,"MassiveIntentClassification (en)":66.12,"MassiveScenarioClassification (en)":73.74,"MTOPDomainClassification (en)":90.4,"MTOPIntentClassification 
(en)":62.28,"ToxicConversationsClassification":67.56,"TweetSentimentExtractionClassification":57.24} -{"index":141,"Rank":174,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.42,"AmazonCounterfactualClassification (en)":81.49,"AmazonPolarityClassification":62.73,"AmazonReviewsClassification (en)":31.55,"Banking77Classification":73.5,"EmotionClassification":38.29,"ImdbClassification":55.75,"MassiveIntentClassification (en)":64.37,"MassiveScenarioClassification (en)":69.05,"MTOPDomainClassification (en)":89.92,"MTOPIntentClassification (en)":70.85,"ToxicConversationsClassification":67.28,"TweetSentimentExtractionClassification":56.23} -{"index":229,"Rank":175,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":63.05,"AmazonCounterfactualClassification (en)":64.15,"AmazonPolarityClassification":62.58,"AmazonReviewsClassification (en)":31.79,"Banking77Classification":79.75,"EmotionClassification":38.43,"ImdbClassification":60.66,"MassiveIntentClassification (en)":67.4,"MassiveScenarioClassification (en)":75.76,"MTOPDomainClassification (en)":91.56,"MTOPIntentClassification (en)":62.18,"ToxicConversationsClassification":66.99,"TweetSentimentExtractionClassification":55.41} -{"index":104,"Rank":176,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.94,"AmazonCounterfactualClassification (en)":65.09,"AmazonPolarityClassification":70.04,"AmazonReviewsClassification (en)":35.34,"Banking77Classification":76.37,"EmotionClassification":41.84,"ImdbClassification":62.8,"MassiveIntentClassification (en)":63.51,"MassiveScenarioClassification (en)":71.01,"MTOPDomainClassification (en)":90.8,"MTOPIntentClassification (en)":58.01,"ToxicConversationsClassification":64.31,"TweetSentimentExtractionClassification":56.19} -{"index":228,"Rank":177,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":62.87,"AmazonCounterfactualClassification (en)":65.28,"AmazonPolarityClassification":62.99,"AmazonReviewsClassification (en)":30.79,"Banking77Classification":80.41,"EmotionClassification":41.17,"ImdbClassification":59.78,"MassiveIntentClassification (en)":67.11,"MassiveScenarioClassification (en)":74.57,"MTOPDomainClassification (en)":91.88,"MTOPIntentClassification (en)":62.83,"ToxicConversationsClassification":63.34,"TweetSentimentExtractionClassification":54.24} -{"index":73,"Rank":178,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.81,"AmazonCounterfactualClassification (en)":68.82,"AmazonPolarityClassification":77.12,"AmazonReviewsClassification (en)":40.94,"Banking77Classification":69.56,"EmotionClassification":46.22,"ImdbClassification":62.17,"MassiveIntentClassification (en)":59.03,"MassiveScenarioClassification (en)":66.59,"MTOPDomainClassification (en)":86.69,"MTOPIntentClassification (en)":49.7,"ToxicConversationsClassification":66.1,"TweetSentimentExtractionClassification":60.78} -{"index":227,"Rank":179,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":62.71,"AmazonCounterfactualClassification (en)":75.93,"AmazonPolarityClassification":68.95,"AmazonReviewsClassification (en)":35.8,"Banking77Classification":69.85,"EmotionClassification":37.22,"ImdbClassification":62.04,"MassiveIntentClassification (en)":61.46,"MassiveScenarioClassification 
(en)":66.41,"MTOPDomainClassification (en)":86.06,"MTOPIntentClassification (en)":63.03,"ToxicConversationsClassification":66.9,"TweetSentimentExtractionClassification":58.82} -{"index":218,"Rank":180,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":62.5,"AmazonCounterfactualClassification (en)":67.09,"AmazonPolarityClassification":74.48,"AmazonReviewsClassification (en)":33.85,"Banking77Classification":73.55,"EmotionClassification":42.22,"ImdbClassification":69.63,"MassiveIntentClassification (en)":59.84,"MassiveScenarioClassification (en)":66.25,"MTOPDomainClassification (en)":81.71,"MTOPIntentClassification (en)":59.23,"ToxicConversationsClassification":68.82,"TweetSentimentExtractionClassification":53.36} -{"index":248,"Rank":181,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.23,"AmazonCounterfactualClassification (en)":70.97,"AmazonPolarityClassification":66.1,"AmazonReviewsClassification (en)":33.13,"Banking77Classification":78.08,"EmotionClassification":43.35,"ImdbClassification":59.35,"MassiveIntentClassification (en)":63.83,"MassiveScenarioClassification (en)":66.96,"MTOPDomainClassification (en)":81.05,"MTOPIntentClassification (en)":62.79,"ToxicConversationsClassification":65.97,"TweetSentimentExtractionClassification":55.22} -{"index":134,"Rank":182,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":61.67,"AmazonCounterfactualClassification (en)":61.79,"AmazonPolarityClassification":62.36,"AmazonReviewsClassification (en)":29.59,"Banking77Classification":78.6,"EmotionClassification":39.6,"ImdbClassification":61.22,"MassiveIntentClassification (en)":66.78,"MassiveScenarioClassification (en)":73.78,"MTOPDomainClassification (en)":89.97,"MTOPIntentClassification (en)":59.57,"ToxicConversationsClassification":65.12,"TweetSentimentExtractionClassification":51.63} -{"index":122,"Rank":183,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.66,"AmazonCounterfactualClassification (en)":74.25,"AmazonPolarityClassification":71.33,"AmazonReviewsClassification (en)":33.56,"Banking77Classification":63.41,"EmotionClassification":35.28,"ImdbClassification":65.35,"MassiveIntentClassification (en)":59.88,"MassiveScenarioClassification (en)":64.28,"MTOPDomainClassification (en)":82.63,"MTOPIntentClassification (en)":68.14,"ToxicConversationsClassification":70.0,"TweetSentimentExtractionClassification":51.81} -{"index":81,"Rank":184,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.46,"AmazonCounterfactualClassification (en)":65.88,"AmazonPolarityClassification":74.94,"AmazonReviewsClassification (en)":35.1,"Banking77Classification":74.68,"EmotionClassification":42.23,"ImdbClassification":62.9,"MassiveIntentClassification (en)":58.08,"MassiveScenarioClassification (en)":66.34,"MTOPDomainClassification (en)":81.52,"MTOPIntentClassification (en)":58.24,"ToxicConversationsClassification":62.79,"TweetSentimentExtractionClassification":54.82} -{"index":128,"Rank":185,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.44,"AmazonCounterfactualClassification (en)":64.87,"AmazonPolarityClassification":65.17,"AmazonReviewsClassification 
(en)":31.02,"Banking77Classification":74.12,"EmotionClassification":36.71,"ImdbClassification":66.9,"MassiveIntentClassification (en)":61.95,"MassiveScenarioClassification (en)":70.2,"MTOPDomainClassification (en)":89.13,"MTOPIntentClassification (en)":62.62,"ToxicConversationsClassification":63.57,"TweetSentimentExtractionClassification":51.04} -{"index":256,"Rank":186,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.37,"AmazonCounterfactualClassification (en)":76.09,"AmazonPolarityClassification":64.51,"AmazonReviewsClassification (en)":32.07,"Banking77Classification":70.88,"EmotionClassification":32.77,"ImdbClassification":62.15,"MassiveIntentClassification (en)":61.44,"MassiveScenarioClassification (en)":66.07,"MTOPDomainClassification (en)":85.0,"MTOPIntentClassification (en)":68.76,"ToxicConversationsClassification":67.81,"TweetSentimentExtractionClassification":48.92} -{"index":80,"Rank":187,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.72,"AmazonCounterfactualClassification (en)":61.24,"AmazonPolarityClassification":65.4,"AmazonReviewsClassification (en)":31.17,"Banking77Classification":77.7,"EmotionClassification":39.08,"ImdbClassification":58.67,"MassiveIntentClassification (en)":61.41,"MassiveScenarioClassification (en)":69.74,"MTOPDomainClassification (en)":86.96,"MTOPIntentClassification (en)":62.25,"ToxicConversationsClassification":62.66,"TweetSentimentExtractionClassification":52.41} -{"index":173,"Rank":188,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.56,"AmazonCounterfactualClassification (en)":64.82,"AmazonPolarityClassification":64.28,"AmazonReviewsClassification (en)":30.62,"Banking77Classification":74.64,"EmotionClassification":36.08,"ImdbClassification":58.71,"MassiveIntentClassification (en)":64.67,"MassiveScenarioClassification (en)":71.79,"MTOPDomainClassification (en)":88.82,"MTOPIntentClassification (en)":58.61,"ToxicConversationsClassification":59.44,"TweetSentimentExtractionClassification":54.26} -{"index":233,"Rank":189,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":57.65,"AmazonCounterfactualClassification (en)":60.54,"AmazonPolarityClassification":59.59,"AmazonReviewsClassification (en)":31.01,"Banking77Classification":67.05,"EmotionClassification":33.18,"ImdbClassification":63.98,"MassiveIntentClassification (en)":57.21,"MassiveScenarioClassification (en)":66.11,"MTOPDomainClassification (en)":78.57,"MTOPIntentClassification (en)":57.07,"ToxicConversationsClassification":67.76,"TweetSentimentExtractionClassification":49.68} -{"index":232,"Rank":190,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":57.29,"AmazonCounterfactualClassification (en)":56.91,"AmazonPolarityClassification":60.32,"AmazonReviewsClassification (en)":29.67,"Banking77Classification":67.69,"EmotionClassification":36.93,"ImdbClassification":62.57,"MassiveIntentClassification (en)":56.19,"MassiveScenarioClassification (en)":66.03,"MTOPDomainClassification (en)":79.11,"MTOPIntentClassification (en)":55.85,"ToxicConversationsClassification":65.4,"TweetSentimentExtractionClassification":50.8} -{"index":11,"Rank":191,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":53.18,"AmazonCounterfactualClassification 
(en)":76.84,"AmazonPolarityClassification":61.01,"AmazonReviewsClassification (en)":28.71,"Banking77Classification":57.76,"EmotionClassification":24.83,"ImdbClassification":57.58,"MassiveIntentClassification (en)":47.91,"MassiveScenarioClassification (en)":55.92,"MTOPDomainClassification (en)":75.36,"MTOPIntentClassification (en)":49.47,"ToxicConversationsClassification":54.05,"TweetSentimentExtractionClassification":48.73} -{"index":231,"Rank":192,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":52.37,"AmazonCounterfactualClassification (en)":58.7,"AmazonPolarityClassification":57.77,"AmazonReviewsClassification (en)":26.26,"Banking77Classification":66.66,"EmotionClassification":24.82,"ImdbClassification":56.35,"MassiveIntentClassification (en)":51.73,"MassiveScenarioClassification (en)":58.58,"MTOPDomainClassification (en)":74.53,"MTOPIntentClassification (en)":50.05,"ToxicConversationsClassification":57.44,"TweetSentimentExtractionClassification":45.52} -{"index":255,"Rank":193,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.73,"AmazonCounterfactualClassification (en)":50.81,"AmazonPolarityClassification":52.57,"AmazonReviewsClassification (en)":22.63,"Banking77Classification":36.43,"EmotionClassification":22.75,"ImdbClassification":50.76,"MassiveIntentClassification (en)":34.33,"MassiveScenarioClassification (en)":44.13,"MTOPDomainClassification (en)":61.03,"MTOPIntentClassification (en)":29.68,"ToxicConversationsClassification":54.93,"TweetSentimentExtractionClassification":40.75} -{"index":2,"Rank":194,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":61.85,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":83.21,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":30,"Rank":204,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":69.91,"AmazonPolarityClassification":72.95,"AmazonReviewsClassification (en)":"","Banking77Classification":64.12,"EmotionClassification":31.51,"ImdbClassification":65.17,"MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":68.76,"MTOPDomainClassification (en)":81.21,"MTOPIntentClassification (en)":51.56,"ToxicConversationsClassification":65.65,"TweetSentimentExtractionClassification":57.41} -{"index":31,"Rank":205,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":53.41,"MassiveScenarioClassification (en)":61.78,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":32,"Rank":206,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification 
(en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":54.76,"MassiveScenarioClassification (en)":63.42,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":37,"Rank":207,"Model":"openai_clip_embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":57.49,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.59,"Banking77Classification":73.42,"EmotionClassification":33.62,"ImdbClassification":56.17,"MassiveIntentClassification (en)":62.96,"MassiveScenarioClassification (en)":71.47,"MTOPDomainClassification (en)":87.07,"MTOPIntentClassification (en)":61.96,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":49.34} -{"index":52,"Rank":218,"Model":"gemma-2b-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":67.49,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":34.9,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":97,"Rank":232,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28} -{"index":98,"Rank":233,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""} -{"index":196,"Rank":253,"Model":"fin-mpnet-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":200,"Rank":254,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification 
(en)":75.54,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":241,"Rank":265,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (en)":71.57,"AmazonPolarityClassification":69.21,"AmazonReviewsClassification (en)":35.11,"Banking77Classification":79.77,"EmotionClassification":42.37,"ImdbClassification":60.46,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":87.06,"MTOPIntentClassification (en)":65.52,"ToxicConversationsClassification":66.07,"TweetSentimentExtractionClassification":56.12} -{"index":242,"Rank":266,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (en)":75.81,"AmazonPolarityClassification":76.41,"AmazonReviewsClassification (en)":38.51,"Banking77Classification":81.07,"EmotionClassification":45.83,"ImdbClassification":64.57,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":89.24,"MTOPIntentClassification (en)":68.69,"ToxicConversationsClassification":71.02,"TweetSentimentExtractionClassification":59.03} -{"index":247,"Rank":267,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":50.68,"MassiveScenarioClassification (en)":60.82,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"index":262,"Rank":270,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":80.3,"AmazonReviewsClassification (en)":"","Banking77Classification":85.19,"EmotionClassification":48.22,"ImdbClassification":69.87,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":75.01,"TweetSentimentExtractionClassification":61.8} -{"index":263,"Rank":271,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":0.86,"AmazonReviewsClassification (en)":"","Banking77Classification":0.81,"EmotionClassification":0.48,"ImdbClassification":0.74,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":0.69,"TweetSentimentExtractionClassification":0.61} -{"index":267,"Rank":275,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification 
(en)":76.66,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.43,"Banking77Classification":73.74,"EmotionClassification":47.44,"ImdbClassification":79.55,"MassiveIntentClassification (en)":65.61,"MassiveScenarioClassification (en)":71.05,"MTOPDomainClassification (en)":90.48,"MTOPIntentClassification (en)":58.11,"ToxicConversationsClassification":63.53,"TweetSentimentExtractionClassification":61.77} +{"Rank":1,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.27,"AmazonReviewsClassification (fr)":43.76,"MasakhaNEWSClassification (fra)":81.52,"MassiveIntentClassification (fr)":65.42,"MassiveScenarioClassification (fr)":71.11,"MTOPDomainClassification (fr)":89.38,"MTOPIntentClassification (fr)":64.45} +{"Rank":2,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.61,"AmazonReviewsClassification (fr)":41.59,"MasakhaNEWSClassification (fra)":81.4,"MassiveIntentClassification (fr)":62.83,"MassiveScenarioClassification (fr)":69.71,"MTOPDomainClassification (fr)":90.05,"MTOPIntentClassification (fr)":66.09} +{"Rank":3,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.56,"AmazonReviewsClassification (fr)":43.36,"MasakhaNEWSClassification (fra)":74.81,"MassiveIntentClassification (fr)":68.06,"MassiveScenarioClassification (fr)":74.29,"MTOPDomainClassification (fr)":90.33,"MTOPIntentClassification (fr)":60.52} +{"Rank":4,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"AmazonReviewsClassification (fr)":41.98,"MasakhaNEWSClassification (fra)":76.42,"MassiveIntentClassification (fr)":66.94,"MassiveScenarioClassification (fr)":72.78,"MTOPDomainClassification (fr)":90.12,"MTOPIntentClassification (fr)":62.44} +{"Rank":5,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.44,"AmazonReviewsClassification (fr)":42.15,"MasakhaNEWSClassification (fra)":82.13,"MassiveIntentClassification (fr)":63.08,"MassiveScenarioClassification (fr)":70.15,"MTOPDomainClassification (fr)":87.68,"MTOPIntentClassification (fr)":59.44} +{"Rank":6,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":67.36,"AmazonReviewsClassification (fr)":46.09,"MasakhaNEWSClassification (fra)":79.1,"MassiveIntentClassification (fr)":65.91,"MassiveScenarioClassification (fr)":68.53,"MTOPDomainClassification (fr)":86.2,"MTOPIntentClassification (fr)":58.33} +{"Rank":7,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.08,"AmazonReviewsClassification (fr)":41.89,"MasakhaNEWSClassification (fra)":83.06,"MassiveIntentClassification (fr)":62.94,"MassiveScenarioClassification (fr)":67.29,"MTOPDomainClassification (fr)":86.23,"MTOPIntentClassification (fr)":61.07} +{"Rank":8,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.37,"AmazonReviewsClassification (fr)":35.09,"MasakhaNEWSClassification (fra)":72.04,"MassiveIntentClassification (fr)":65.8,"MassiveScenarioClassification (fr)":73.47,"MTOPDomainClassification (fr)":88.19,"MTOPIntentClassification (fr)":63.64} +{"Rank":9,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, 
fp32)":1.26,"Average":65.82,"AmazonReviewsClassification (fr)":37.97,"MasakhaNEWSClassification (fra)":80.62,"MassiveIntentClassification (fr)":62.65,"MassiveScenarioClassification (fr)":69.29,"MTOPDomainClassification (fr)":85.74,"MTOPIntentClassification (fr)":58.62} +{"Rank":10,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":65.09,"AmazonReviewsClassification (fr)":43.52,"MasakhaNEWSClassification (fra)":80.09,"MassiveIntentClassification (fr)":60.99,"MassiveScenarioClassification (fr)":66.42,"MTOPDomainClassification (fr)":85.14,"MTOPIntentClassification (fr)":54.39} +{"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.64,"AmazonReviewsClassification (fr)":39.0,"MasakhaNEWSClassification (fra)":78.1,"MassiveIntentClassification (fr)":61.88,"MassiveScenarioClassification (fr)":67.9,"MTOPDomainClassification (fr)":81.21,"MTOPIntentClassification (fr)":59.76} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":64.6,"AmazonReviewsClassification (fr)":38.52,"MasakhaNEWSClassification (fra)":77.39,"MassiveIntentClassification (fr)":60.47,"MassiveScenarioClassification (fr)":65.1,"MTOPDomainClassification (fr)":84.14,"MTOPIntentClassification (fr)":62.01} +{"Rank":13,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":64.57,"AmazonReviewsClassification (fr)":34.79,"MasakhaNEWSClassification (fra)":79.29,"MassiveIntentClassification (fr)":59.41,"MassiveScenarioClassification (fr)":65.29,"MTOPDomainClassification (fr)":85.52,"MTOPIntentClassification (fr)":63.12} +{"Rank":14,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.41,"AmazonReviewsClassification (fr)":33.51,"MasakhaNEWSClassification (fra)":82.06,"MassiveIntentClassification (fr)":61.19,"MassiveScenarioClassification (fr)":70.22,"MTOPDomainClassification (fr)":85.5,"MTOPIntentClassification (fr)":53.98} +{"Rank":15,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":62.14,"AmazonReviewsClassification (fr)":35.7,"MasakhaNEWSClassification (fra)":76.87,"MassiveIntentClassification (fr)":57.02,"MassiveScenarioClassification (fr)":65.2,"MTOPDomainClassification (fr)":84.61,"MTOPIntentClassification (fr)":53.41} +{"Rank":16,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":61.64,"AmazonReviewsClassification (fr)":41.48,"MasakhaNEWSClassification (fra)":80.43,"MassiveIntentClassification (fr)":57.01,"MassiveScenarioClassification (fr)":63.6,"MTOPDomainClassification (fr)":79.6,"MTOPIntentClassification (fr)":47.73} +{"Rank":17,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.3,"AmazonReviewsClassification (fr)":38.6,"MasakhaNEWSClassification (fra)":82.58,"MassiveIntentClassification (fr)":56.31,"MassiveScenarioClassification (fr)":59.5,"MTOPDomainClassification (fr)":80.79,"MTOPIntentClassification (fr)":50.01} +{"Rank":18,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.02,"AmazonReviewsClassification (fr)":35.3,"MasakhaNEWSClassification 
(fra)":76.09,"MassiveIntentClassification (fr)":57.52,"MassiveScenarioClassification (fr)":64.52,"MTOPDomainClassification (fr)":78.63,"MTOPIntentClassification (fr)":54.05} +{"Rank":19,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.84,"AmazonReviewsClassification (fr)":37.26,"MasakhaNEWSClassification (fra)":80.19,"MassiveIntentClassification (fr)":53.7,"MassiveScenarioClassification (fr)":62.46,"MTOPDomainClassification (fr)":79.79,"MTOPIntentClassification (fr)":45.62} +{"Rank":20,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":58.08,"AmazonReviewsClassification (fr)":37.35,"MasakhaNEWSClassification (fra)":81.21,"MassiveIntentClassification (fr)":51.13,"MassiveScenarioClassification (fr)":59.92,"MTOPDomainClassification (fr)":75.03,"MTOPIntentClassification (fr)":43.85} +{"Rank":21,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":57.72,"AmazonReviewsClassification (fr)":36.71,"MasakhaNEWSClassification (fra)":80.59,"MassiveIntentClassification (fr)":46.39,"MassiveScenarioClassification (fr)":53.86,"MTOPDomainClassification (fr)":74.8,"MTOPIntentClassification (fr)":53.97} +{"Rank":22,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.63,"AmazonReviewsClassification (fr)":36.03,"MasakhaNEWSClassification (fra)":70.36,"MassiveIntentClassification (fr)":51.59,"MassiveScenarioClassification (fr)":61.28,"MTOPDomainClassification (fr)":77.1,"MTOPIntentClassification (fr)":43.44} +{"Rank":23,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.78,"AmazonReviewsClassification (fr)":34.25,"MasakhaNEWSClassification (fra)":73.84,"MassiveIntentClassification (fr)":51.93,"MassiveScenarioClassification (fr)":58.31,"MTOPDomainClassification (fr)":71.83,"MTOPIntentClassification (fr)":44.53} +{"Rank":24,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.9,"AmazonReviewsClassification (fr)":35.12,"MasakhaNEWSClassification (fra)":80.83,"MassiveIntentClassification (fr)":43.21,"MassiveScenarioClassification (fr)":49.78,"MTOPDomainClassification (fr)":69.24,"MTOPIntentClassification (fr)":51.25} +{"Rank":25,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":53.31,"AmazonReviewsClassification (fr)":31.12,"MasakhaNEWSClassification (fra)":65.9,"MassiveIntentClassification (fr)":46.13,"MassiveScenarioClassification (fr)":54.32,"MTOPDomainClassification (fr)":72.26,"MTOPIntentClassification (fr)":50.12} +{"Rank":26,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":52.14,"AmazonReviewsClassification (fr)":27.54,"MasakhaNEWSClassification (fra)":72.2,"MassiveIntentClassification (fr)":44.82,"MassiveScenarioClassification (fr)":53.76,"MTOPDomainClassification (fr)":75.59,"MTOPIntentClassification (fr)":38.94} +{"Rank":27,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":50.9,"AmazonReviewsClassification (fr)":27.05,"MasakhaNEWSClassification (fra)":75.62,"MassiveIntentClassification (fr)":42.64,"MassiveScenarioClassification (fr)":49.92,"MTOPDomainClassification (fr)":72.97,"MTOPIntentClassification (fr)":37.18} 
+{"Rank":28,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":48.37,"AmazonReviewsClassification (fr)":29.02,"MasakhaNEWSClassification (fra)":75.69,"MassiveIntentClassification (fr)":38.01,"MassiveScenarioClassification (fr)":43.63,"MTOPDomainClassification (fr)":64.49,"MTOPIntentClassification (fr)":39.4} +{"Rank":29,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":46.1,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":64.0,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.61,"MTOPIntentClassification (fr)":37.84} +{"Rank":30,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.1,"AmazonReviewsClassification (fr)":29.38,"MasakhaNEWSClassification (fra)":63.93,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.5,"MTOPDomainClassification (fr)":63.65,"MTOPIntentClassification (fr)":37.87} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":63.91,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.63,"MTOPIntentClassification (fr)":37.86} +{"Rank":32,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.35,"MasakhaNEWSClassification (fra)":63.89,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.7,"MTOPIntentClassification (fr)":37.85} +{"Rank":33,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":31.21,"AmazonReviewsClassification (fr)":26.75,"MasakhaNEWSClassification (fra)":60.5,"MassiveIntentClassification (fr)":13.58,"MassiveScenarioClassification (fr)":23.21,"MTOPDomainClassification (fr)":43.83,"MTOPIntentClassification (fr)":19.38} +{"Rank":34,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.71,"AmazonReviewsClassification (fr)":26.62,"MasakhaNEWSClassification (fra)":65.76,"MassiveIntentClassification (fr)":15.82,"MassiveScenarioClassification (fr)":23.92,"MTOPDomainClassification (fr)":36.77,"MTOPIntentClassification (fr)":15.37} +{"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.38,"AmazonReviewsClassification (fr)":26.85,"MasakhaNEWSClassification (fra)":67.94,"MassiveIntentClassification (fr)":15.09,"MassiveScenarioClassification (fr)":21.67,"MTOPDomainClassification (fr)":34.99,"MTOPIntentClassification (fr)":15.76} +{"Rank":36,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.21,"AmazonReviewsClassification (fr)":22.45,"MasakhaNEWSClassification (fra)":55.64,"MassiveIntentClassification (fr)":16.41,"MassiveScenarioClassification (fr)":22.72,"MTOPDomainClassification (fr)":24.27,"MTOPIntentClassification (fr)":9.79} +{"Rank":37,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":24.91,"AmazonReviewsClassification 
(fr)":24.9,"MasakhaNEWSClassification (fra)":71.14,"MassiveIntentClassification (fr)":6.98,"MassiveScenarioClassification (fr)":11.41,"MTOPDomainClassification (fr)":25.55,"MTOPIntentClassification (fr)":9.49} +{"Rank":38,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":23.28,"AmazonReviewsClassification (fr)":23.52,"MasakhaNEWSClassification (fra)":62.61,"MassiveIntentClassification (fr)":6.24,"MassiveScenarioClassification (fr)":10.98,"MTOPDomainClassification (fr)":27.74,"MTOPIntentClassification (fr)":8.61} +{"Rank":39,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":40,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":41,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":42,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":43,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":44,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":45,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":46,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"AmazonReviewsClassification (fr)":40.94,"MasakhaNEWSClassification (fra)":79.69,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":84.79,"MTOPIntentClassification (fr)":55.51} 
+{"Rank":47,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AmazonReviewsClassification (fr)":41.91,"MasakhaNEWSClassification (fra)":79.38,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":86.41,"MTOPIntentClassification (fr)":59.43} +{"Rank":48,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"AmazonReviewsClassification (fr)":39.68,"MasakhaNEWSClassification (fra)":77.65,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":81.2,"MTOPIntentClassification (fr)":46.01} +{"Rank":49,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":74.05,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AmazonReviewsClassification (fr)":23.31,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":38.41,"MassiveScenarioClassification (fr)":40.26,"MTOPDomainClassification (fr)":54.61,"MTOPIntentClassification (fr)":34.71} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AmazonReviewsClassification (fr)":33.48,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":54.83,"MassiveScenarioClassification (fr)":64.06,"MTOPDomainClassification (fr)":82.48,"MTOPIntentClassification (fr)":46.39} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AmazonReviewsClassification (fr)":35.48,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":57.67,"MassiveScenarioClassification (fr)":66.72,"MTOPDomainClassification (fr)":85.05,"MTOPIntentClassification (fr)":51.07} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification 
(fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} diff --git a/all_data_tasks/1/default.jsonl b/all_data_tasks/1/default.jsonl index 8c3475c965832ab4507ac08e72e70994950da247..d0ea9ac88d9fb3a9ba472f51e055c20b09b22807 100644 --- a/all_data_tasks/1/default.jsonl +++ b/all_data_tasks/1/default.jsonl @@ -1,205 +1,57 @@ -{"index":21,"Rank":1,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.89,"ArxivClusteringP2P":54.44,"ArxivClusteringS2S":49.33,"BiorxivClusteringP2P":53.05,"BiorxivClusteringS2S":48.38,"MedrxivClusteringP2P":45.86,"MedrxivClusteringS2S":44.33,"RedditClustering":72.33,"RedditClusteringP2P":72.72,"StackExchangeClustering":81.32,"StackExchangeClusteringP2P":46.05,"TwentyNewsgroupsClustering":68.98} -{"index":138,"Rank":2,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.69,"ArxivClusteringP2P":55.44,"ArxivClusteringS2S":50.66,"BiorxivClusteringP2P":50.68,"BiorxivClusteringS2S":46.87,"MedrxivClusteringP2P":46.87,"MedrxivClusteringS2S":44.65,"RedditClustering":72.86,"RedditClusteringP2P":75.27,"StackExchangeClustering":80.29,"StackExchangeClusteringP2P":49.57,"TwentyNewsgroupsClustering":61.43} -{"index":17,"Rank":3,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":56.92,"ArxivClusteringP2P":56.46,"ArxivClusteringS2S":51.74,"BiorxivClusteringP2P":50.09,"BiorxivClusteringS2S":46.65,"MedrxivClusteringP2P":46.23,"MedrxivClusteringS2S":44.13,"RedditClustering":73.55,"RedditClusteringP2P":74.13,"StackExchangeClustering":79.86,"StackExchangeClusteringP2P":49.41,"TwentyNewsgroupsClustering":53.91} -{"index":205,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.92,"ArxivClusteringP2P":56.46,"ArxivClusteringS2S":51.74,"BiorxivClusteringP2P":50.09,"BiorxivClusteringS2S":46.65,"MedrxivClusteringP2P":46.23,"MedrxivClusteringS2S":44.13,"RedditClustering":73.55,"RedditClusteringP2P":74.13,"StackExchangeClustering":79.86,"StackExchangeClusteringP2P":49.41,"TwentyNewsgroupsClustering":53.91} -{"index":126,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.92,"ArxivClusteringP2P":56.46,"ArxivClusteringS2S":51.74,"BiorxivClusteringP2P":50.09,"BiorxivClusteringS2S":46.65,"MedrxivClusteringP2P":46.23,"MedrxivClusteringS2S":44.13,"RedditClustering":73.55,"RedditClusteringP2P":74.13,"StackExchangeClustering":79.86,"StackExchangeClusteringP2P":49.41,"TwentyNewsgroupsClustering":53.91} -{"index":139,"Rank":6,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":56.7,"ArxivClusteringP2P":55.16,"ArxivClusteringS2S":49.82,"BiorxivClusteringP2P":50.68,"BiorxivClusteringS2S":45.81,"MedrxivClusteringP2P":46.32,"MedrxivClusteringS2S":44.29,"RedditClustering":71.19,"RedditClusteringP2P":74.42,"StackExchangeClustering":78.49,"StackExchangeClusteringP2P":48.9,"TwentyNewsgroupsClustering":58.57} -{"index":95,"Rank":7,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.17,"ArxivClusteringP2P":54.02,"ArxivClusteringS2S":48.82,"BiorxivClusteringP2P":50.76,"BiorxivClusteringS2S":46.57,"MedrxivClusteringP2P":46.66,"MedrxivClusteringS2S":44.18,"RedditClustering":62.92,"RedditClusteringP2P":72.74,"StackExchangeClustering":76.48,"StackExchangeClusteringP2P":48.29,"TwentyNewsgroupsClustering":66.42} -{"index":15,"Rank":8,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":55.83,"ArxivClusteringP2P":56.4,"ArxivClusteringS2S":51.45,"BiorxivClusteringP2P":49.01,"BiorxivClusteringS2S":45.06,"MedrxivClusteringP2P":44.37,"MedrxivClusteringS2S":42.0,"RedditClustering":73.37,"RedditClusteringP2P":72.51,"StackExchangeClustering":79.07,"StackExchangeClusteringP2P":49.57,"TwentyNewsgroupsClustering":51.31} -{"index":23,"Rank":9,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.65,"ArxivClusteringP2P":54.91,"ArxivClusteringS2S":50.28,"BiorxivClusteringP2P":52.64,"BiorxivClusteringS2S":49.2,"MedrxivClusteringP2P":45.81,"MedrxivClusteringS2S":44.11,"RedditClustering":56.03,"RedditClusteringP2P":65.83,"StackExchangeClustering":66.21,"StackExchangeClusteringP2P":45.74,"TwentyNewsgroupsClustering":70.44} -{"index":51,"Rank":10,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"ArxivClusteringP2P":53.45,"ArxivClusteringS2S":48.82,"BiorxivClusteringP2P":48.82,"BiorxivClusteringS2S":45.07,"MedrxivClusteringP2P":42.63,"MedrxivClusteringS2S":40.97,"RedditClustering":66.44,"RedditClusteringP2P":69.56,"StackExchangeClustering":75.95,"StackExchangeClusteringP2P":46.2,"TwentyNewsgroupsClustering":59.6} -{"index":6,"Rank":11,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.35,"ArxivClusteringP2P":51.81,"ArxivClusteringS2S":44.73,"BiorxivClusteringP2P":46.07,"BiorxivClusteringS2S":40.64,"MedrxivClusteringP2P":42.94,"MedrxivClusteringS2S":41.44,"RedditClustering":68.5,"RedditClusteringP2P":64.86,"StackExchangeClustering":74.16,"StackExchangeClusteringP2P":45.1,"TwentyNewsgroupsClustering":66.62} -{"index":215,"Rank":12,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":52.8,"ArxivClusteringP2P":53.76,"ArxivClusteringS2S":49.59,"BiorxivClusteringP2P":48.15,"BiorxivClusteringS2S":44.74,"MedrxivClusteringP2P":39.24,"MedrxivClusteringS2S":36.98,"RedditClustering":63.2,"RedditClusteringP2P":68.01,"StackExchangeClustering":74.99,"StackExchangeClusteringP2P":42.04,"TwentyNewsgroupsClustering":60.13} -{"index":9,"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, 
fp32)":4.54,"Average":52.42,"ArxivClusteringP2P":51.95,"ArxivClusteringS2S":42.48,"BiorxivClusteringP2P":50.15,"BiorxivClusteringS2S":42.84,"MedrxivClusteringP2P":47.24,"MedrxivClusteringS2S":43.48,"RedditClustering":63.73,"RedditClusteringP2P":64.09,"StackExchangeClustering":70.71,"StackExchangeClusteringP2P":40.34,"TwentyNewsgroupsClustering":59.56} -{"index":96,"Rank":14,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":51.67,"ArxivClusteringP2P":52.08,"ArxivClusteringS2S":47.38,"BiorxivClusteringP2P":43.94,"BiorxivClusteringS2S":41.14,"MedrxivClusteringP2P":40.03,"MedrxivClusteringS2S":39.0,"RedditClustering":59.9,"RedditClusteringP2P":67.64,"StackExchangeClustering":74.25,"StackExchangeClusteringP2P":46.78,"TwentyNewsgroupsClustering":56.27} -{"index":58,"Rank":15,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":51.42,"ArxivClusteringP2P":51.48,"ArxivClusteringS2S":47.3,"BiorxivClusteringP2P":42.73,"BiorxivClusteringS2S":39.58,"MedrxivClusteringP2P":37.84,"MedrxivClusteringS2S":36.65,"RedditClustering":61.52,"RedditClusteringP2P":68.24,"StackExchangeClustering":76.94,"StackExchangeClusteringP2P":46.04,"TwentyNewsgroupsClustering":57.3} -{"index":42,"Rank":16,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":50.61,"ArxivClusteringP2P":51.67,"ArxivClusteringS2S":48.11,"BiorxivClusteringP2P":40.87,"BiorxivClusteringS2S":39.8,"MedrxivClusteringP2P":36.52,"MedrxivClusteringS2S":36.8,"RedditClustering":61.3,"RedditClusteringP2P":67.26,"StackExchangeClustering":77.33,"StackExchangeClusteringP2P":41.33,"TwentyNewsgroupsClustering":55.7} -{"index":156,"Rank":17,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":50.26,"ArxivClusteringP2P":50.45,"ArxivClusteringS2S":45.5,"BiorxivClusteringP2P":43.53,"BiorxivClusteringS2S":40.24,"MedrxivClusteringP2P":38.19,"MedrxivClusteringS2S":37.45,"RedditClustering":57.71,"RedditClusteringP2P":66.49,"StackExchangeClustering":73.1,"StackExchangeClusteringP2P":45.91,"TwentyNewsgroupsClustering":54.31} -{"index":43,"Rank":18,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.14,"ArxivClusteringP2P":50.72,"ArxivClusteringS2S":48.01,"BiorxivClusteringP2P":41.41,"BiorxivClusteringS2S":38.67,"MedrxivClusteringP2P":36.54,"MedrxivClusteringS2S":37.24,"RedditClustering":63.01,"RedditClusteringP2P":65.86,"StackExchangeClustering":74.41,"StackExchangeClusteringP2P":38.52,"TwentyNewsgroupsClustering":57.16} -{"index":283,"Rank":19,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.01,"ArxivClusteringP2P":49.01,"ArxivClusteringS2S":44.45,"BiorxivClusteringP2P":38.03,"BiorxivClusteringS2S":36.53,"MedrxivClusteringP2P":32.7,"MedrxivClusteringS2S":31.27,"RedditClustering":67.84,"RedditClusteringP2P":67.96,"StackExchangeClustering":76.26,"StackExchangeClusteringP2P":36.88,"TwentyNewsgroupsClustering":58.14} -{"index":204,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":48.75,"ArxivClusteringP2P":50.51,"ArxivClusteringS2S":45.01,"BiorxivClusteringP2P":43.21,"BiorxivClusteringS2S":38.82,"MedrxivClusteringP2P":39.39,"MedrxivClusteringS2S":37.9,"RedditClustering":55.82,"RedditClusteringP2P":62.09,"StackExchangeClustering":67.65,"StackExchangeClusteringP2P":46.31,"TwentyNewsgroupsClustering":49.58} -{"index":16,"Rank":21,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.75,"ArxivClusteringP2P":50.51,"ArxivClusteringS2S":45.01,"BiorxivClusteringP2P":43.21,"BiorxivClusteringS2S":38.82,"MedrxivClusteringP2P":39.39,"MedrxivClusteringS2S":37.9,"RedditClustering":55.82,"RedditClusteringP2P":62.09,"StackExchangeClustering":67.65,"StackExchangeClusteringP2P":46.31,"TwentyNewsgroupsClustering":49.58} -{"index":19,"Rank":22,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.96,"ArxivClusteringP2P":48.47,"ArxivClusteringS2S":43.39,"BiorxivClusteringP2P":40.58,"BiorxivClusteringS2S":37.94,"MedrxivClusteringP2P":35.04,"MedrxivClusteringS2S":32.94,"RedditClustering":59.76,"RedditClusteringP2P":68.03,"StackExchangeClustering":71.27,"StackExchangeClusteringP2P":39.18,"TwentyNewsgroupsClustering":50.91} -{"index":219,"Rank":23,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.86,"ArxivClusteringP2P":48.78,"ArxivClusteringS2S":46.11,"BiorxivClusteringP2P":38.79,"BiorxivClusteringS2S":37.37,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":32.33,"RedditClustering":62.16,"RedditClusteringP2P":65.96,"StackExchangeClustering":71.35,"StackExchangeClusteringP2P":36.11,"TwentyNewsgroupsClustering":54.78} -{"index":1,"Rank":24,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":47.48,"ArxivClusteringP2P":46.27,"ArxivClusteringS2S":38.36,"BiorxivClusteringP2P":37.87,"BiorxivClusteringS2S":35.67,"MedrxivClusteringP2P":33.11,"MedrxivClusteringS2S":31.54,"RedditClustering":65.81,"RedditClusteringP2P":66.62,"StackExchangeClustering":74.52,"StackExchangeClusteringP2P":37.63,"TwentyNewsgroupsClustering":54.87} -{"index":34,"Rank":25,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.43,"ArxivClusteringP2P":49.17,"ArxivClusteringS2S":42.18,"BiorxivClusteringP2P":40.96,"BiorxivClusteringS2S":37.25,"MedrxivClusteringP2P":37.41,"MedrxivClusteringS2S":33.39,"RedditClustering":58.46,"RedditClusteringP2P":66.35,"StackExchangeClustering":68.42,"StackExchangeClusteringP2P":37.51,"TwentyNewsgroupsClustering":50.64} -{"index":8,"Rank":26,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.4,"ArxivClusteringP2P":47.92,"ArxivClusteringS2S":42.42,"BiorxivClusteringP2P":38.72,"BiorxivClusteringS2S":36.6,"MedrxivClusteringP2P":34.04,"MedrxivClusteringS2S":32.81,"RedditClustering":61.56,"RedditClusteringP2P":65.35,"StackExchangeClustering":70.16,"StackExchangeClusteringP2P":38.23,"TwentyNewsgroupsClustering":53.56} -{"index":161,"Rank":27,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":47.1,"ArxivClusteringP2P":46.4,"ArxivClusteringS2S":40.49,"BiorxivClusteringP2P":40.94,"BiorxivClusteringS2S":36.28,"MedrxivClusteringP2P":36.93,"MedrxivClusteringS2S":35.54,"RedditClustering":56.6,"RedditClusteringP2P":64.27,"StackExchangeClustering":66.85,"StackExchangeClusteringP2P":42.46,"TwentyNewsgroupsClustering":51.33} -{"index":53,"Rank":28,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.93,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.05,"BiorxivClusteringP2P":39.74,"BiorxivClusteringS2S":36.57,"MedrxivClusteringP2P":33.71,"MedrxivClusteringS2S":31.81,"RedditClustering":61.38,"RedditClusteringP2P":65.29,"StackExchangeClustering":66.6,"StackExchangeClusteringP2P":36.32,"TwentyNewsgroupsClustering":52.69} -{"index":253,"Rank":29,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.84,"ArxivClusteringP2P":48.62,"ArxivClusteringS2S":43.36,"BiorxivClusteringP2P":39.11,"BiorxivClusteringS2S":36.85,"MedrxivClusteringP2P":33.39,"MedrxivClusteringS2S":31.76,"RedditClustering":60.83,"RedditClusteringP2P":64.24,"StackExchangeClustering":67.64,"StackExchangeClusteringP2P":36.57,"TwentyNewsgroupsClustering":52.82} -{"index":18,"Rank":30,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.82,"ArxivClusteringP2P":47.51,"ArxivClusteringS2S":42.05,"BiorxivClusteringP2P":40.32,"BiorxivClusteringS2S":37.55,"MedrxivClusteringP2P":34.6,"MedrxivClusteringS2S":32.27,"RedditClustering":58.61,"RedditClusteringP2P":66.87,"StackExchangeClustering":68.93,"StackExchangeClusteringP2P":37.6,"TwentyNewsgroupsClustering":48.75} -{"index":108,"Rank":31,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.73,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.09,"BiorxivClusteringP2P":39.38,"BiorxivClusteringS2S":37.23,"MedrxivClusteringP2P":33.22,"MedrxivClusteringS2S":31.18,"RedditClustering":60.52,"RedditClusteringP2P":65.35,"StackExchangeClustering":66.54,"StackExchangeClusteringP2P":36.72,"TwentyNewsgroupsClustering":51.72} -{"index":165,"Rank":32,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.73,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.09,"BiorxivClusteringP2P":39.38,"BiorxivClusteringS2S":37.23,"MedrxivClusteringP2P":33.22,"MedrxivClusteringS2S":31.18,"RedditClustering":60.52,"RedditClusteringP2P":65.35,"StackExchangeClustering":66.54,"StackExchangeClusteringP2P":36.72,"TwentyNewsgroupsClustering":51.72} -{"index":111,"Rank":33,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.73,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.09,"BiorxivClusteringP2P":39.38,"BiorxivClusteringS2S":37.23,"MedrxivClusteringP2P":33.22,"MedrxivClusteringS2S":31.18,"RedditClustering":60.52,"RedditClusteringP2P":65.35,"StackExchangeClustering":66.54,"StackExchangeClusteringP2P":36.72,"TwentyNewsgroupsClustering":51.72} -{"index":194,"Rank":34,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.71,"ArxivClusteringP2P":48.97,"ArxivClusteringS2S":42.98,"BiorxivClusteringP2P":39.92,"BiorxivClusteringS2S":36.73,"MedrxivClusteringP2P":33.44,"MedrxivClusteringS2S":31.66,"RedditClustering":60.22,"RedditClusteringP2P":65.29,"StackExchangeClustering":65.62,"StackExchangeClusteringP2P":35.78,"TwentyNewsgroupsClustering":53.21} -{"index":133,"Rank":35,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.71,"ArxivClusteringP2P":48.97,"ArxivClusteringS2S":42.98,"BiorxivClusteringP2P":39.92,"BiorxivClusteringS2S":36.73,"MedrxivClusteringP2P":33.44,"MedrxivClusteringS2S":31.66,"RedditClustering":60.22,"RedditClusteringP2P":65.29,"StackExchangeClustering":65.62,"StackExchangeClusteringP2P":35.78,"TwentyNewsgroupsClustering":53.21} -{"index":284,"Rank":36,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.65,"ArxivClusteringP2P":46.57,"ArxivClusteringS2S":39.35,"BiorxivClusteringP2P":37.77,"BiorxivClusteringS2S":34.68,"MedrxivClusteringP2P":32.77,"MedrxivClusteringS2S":31.85,"RedditClustering":64.09,"RedditClusteringP2P":65.12,"StackExchangeClustering":72.05,"StackExchangeClusteringP2P":34.04,"TwentyNewsgroupsClustering":54.81} -{"index":170,"Rank":37,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.61,"ArxivClusteringP2P":48.73,"ArxivClusteringS2S":42.87,"BiorxivClusteringP2P":39.73,"BiorxivClusteringS2S":37.24,"MedrxivClusteringP2P":34.31,"MedrxivClusteringS2S":32.18,"RedditClustering":59.43,"RedditClusteringP2P":64.54,"StackExchangeClustering":65.08,"StackExchangeClusteringP2P":35.52,"TwentyNewsgroupsClustering":53.12} -{"index":36,"Rank":38,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.6,"ArxivClusteringP2P":48.16,"ArxivClusteringS2S":40.79,"BiorxivClusteringP2P":40.5,"BiorxivClusteringS2S":36.91,"MedrxivClusteringP2P":36.18,"MedrxivClusteringS2S":33.44,"RedditClustering":58.11,"RedditClusteringP2P":65.02,"StackExchangeClustering":68.12,"StackExchangeClusteringP2P":35.22,"TwentyNewsgroupsClustering":50.14} -{"index":117,"Rank":39,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.55,"ArxivClusteringP2P":48.5,"ArxivClusteringS2S":42.58,"BiorxivClusteringP2P":39.34,"BiorxivClusteringS2S":36.18,"MedrxivClusteringP2P":34.48,"MedrxivClusteringS2S":32.26,"RedditClustering":60.62,"RedditClusteringP2P":63.29,"StackExchangeClustering":66.47,"StackExchangeClusteringP2P":36.4,"TwentyNewsgroupsClustering":51.91} -{"index":197,"Rank":40,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.46,"ArxivClusteringP2P":48.66,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.81,"BiorxivClusteringS2S":36.63,"MedrxivClusteringP2P":33.63,"MedrxivClusteringS2S":31.7,"RedditClustering":59.77,"RedditClusteringP2P":63.79,"StackExchangeClustering":66.31,"StackExchangeClusteringP2P":34.99,"TwentyNewsgroupsClustering":52.98} -{"index":261,"Rank":41,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.46,"ArxivClusteringP2P":48.66,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.81,"BiorxivClusteringS2S":36.63,"MedrxivClusteringP2P":33.63,"MedrxivClusteringS2S":31.7,"RedditClustering":59.77,"RedditClusteringP2P":63.79,"StackExchangeClustering":66.31,"StackExchangeClusteringP2P":34.99,"TwentyNewsgroupsClustering":52.98} -{"index":62,"Rank":42,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":46.45,"ArxivClusteringP2P":44.27,"ArxivClusteringS2S":46.85,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":30.71,"MedrxivClusteringS2S":32.96,"RedditClustering":61.72,"RedditClusteringP2P":63.98,"StackExchangeClustering":72.74,"StackExchangeClusteringP2P":32.26,"TwentyNewsgroupsClustering":56.41} -{"index":178,"Rank":43,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.32,"ArxivClusteringP2P":47.02,"ArxivClusteringS2S":43.52,"BiorxivClusteringP2P":35.53,"BiorxivClusteringS2S":35.34,"MedrxivClusteringP2P":30.27,"MedrxivClusteringS2S":29.67,"RedditClustering":61.77,"RedditClusteringP2P":66.01,"StackExchangeClustering":72.04,"StackExchangeClusteringP2P":35.29,"TwentyNewsgroupsClustering":53.04} -{"index":282,"Rank":44,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.23,"ArxivClusteringP2P":47.05,"ArxivClusteringS2S":42.59,"BiorxivClusteringP2P":35.43,"BiorxivClusteringS2S":33.86,"MedrxivClusteringP2P":32.1,"MedrxivClusteringS2S":31.15,"RedditClustering":60.18,"RedditClusteringP2P":64.71,"StackExchangeClustering":71.23,"StackExchangeClusteringP2P":35.95,"TwentyNewsgroupsClustering":54.24} -{"index":115,"Rank":45,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.21,"ArxivClusteringP2P":48.29,"ArxivClusteringS2S":42.74,"BiorxivClusteringP2P":39.04,"BiorxivClusteringS2S":36.67,"MedrxivClusteringP2P":33.92,"MedrxivClusteringS2S":32.25,"RedditClustering":59.11,"RedditClusteringP2P":62.44,"StackExchangeClustering":66.14,"StackExchangeClusteringP2P":35.59,"TwentyNewsgroupsClustering":52.16} -{"index":252,"Rank":46,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.2,"ArxivClusteringP2P":48.6,"ArxivClusteringS2S":43.01,"BiorxivClusteringP2P":38.2,"BiorxivClusteringS2S":36.59,"MedrxivClusteringP2P":33.17,"MedrxivClusteringS2S":31.77,"RedditClustering":59.33,"RedditClusteringP2P":62.59,"StackExchangeClustering":66.64,"StackExchangeClusteringP2P":36.0,"TwentyNewsgroupsClustering":52.31} -{"index":22,"Rank":47,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":46.08,"ArxivClusteringP2P":48.57,"ArxivClusteringS2S":43.19,"BiorxivClusteringP2P":39.71,"BiorxivClusteringS2S":36.9,"MedrxivClusteringP2P":32.56,"MedrxivClusteringS2S":31.47,"RedditClustering":57.24,"RedditClusteringP2P":64.66,"StackExchangeClustering":66.4,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":51.27} -{"index":150,"Rank":48,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.08,"ArxivClusteringP2P":48.57,"ArxivClusteringS2S":43.19,"BiorxivClusteringP2P":39.71,"BiorxivClusteringS2S":36.9,"MedrxivClusteringP2P":32.56,"MedrxivClusteringS2S":31.47,"RedditClustering":57.24,"RedditClusteringP2P":64.66,"StackExchangeClustering":66.4,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":51.27} -{"index":114,"Rank":49,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.08,"ArxivClusteringP2P":48.57,"ArxivClusteringS2S":43.19,"BiorxivClusteringP2P":39.71,"BiorxivClusteringS2S":36.9,"MedrxivClusteringP2P":32.56,"MedrxivClusteringS2S":31.47,"RedditClustering":57.24,"RedditClusteringP2P":64.66,"StackExchangeClustering":66.4,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":51.27} -{"index":193,"Rank":50,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.07,"ArxivClusteringP2P":48.29,"ArxivClusteringS2S":42.34,"BiorxivClusteringP2P":39.73,"BiorxivClusteringS2S":36.95,"MedrxivClusteringP2P":33.66,"MedrxivClusteringS2S":32.22,"RedditClustering":57.93,"RedditClusteringP2P":62.47,"StackExchangeClustering":66.41,"StackExchangeClusteringP2P":35.32,"TwentyNewsgroupsClustering":51.48} -{"index":281,"Rank":51,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.9,"ArxivClusteringP2P":45.01,"ArxivClusteringS2S":36.85,"BiorxivClusteringP2P":36.66,"BiorxivClusteringS2S":34.21,"MedrxivClusteringP2P":32.6,"MedrxivClusteringS2S":30.8,"RedditClustering":61.42,"RedditClusteringP2P":64.13,"StackExchangeClustering":72.22,"StackExchangeClusteringP2P":38.49,"TwentyNewsgroupsClustering":52.56} -{"index":20,"Rank":52,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":45.81,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.44,"BiorxivClusteringS2S":36.62,"MedrxivClusteringP2P":33.21,"MedrxivClusteringS2S":31.68,"RedditClustering":56.61,"RedditClusteringP2P":62.66,"StackExchangeClustering":66.11,"StackExchangeClusteringP2P":35.24,"TwentyNewsgroupsClustering":50.75} -{"index":120,"Rank":53,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"index":181,"Rank":54,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"index":182,"Rank":55,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"index":179,"Rank":56,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"index":180,"Rank":57,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"index":186,"Rank":58,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.58,"ArxivClusteringP2P":48.58,"ArxivClusteringS2S":43.14,"BiorxivClusteringP2P":38.84,"BiorxivClusteringS2S":36.38,"MedrxivClusteringP2P":31.96,"MedrxivClusteringS2S":30.88,"RedditClustering":57.61,"RedditClusteringP2P":64.03,"StackExchangeClustering":65.54,"StackExchangeClusteringP2P":33.98,"TwentyNewsgroupsClustering":50.41} -{"index":64,"Rank":59,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":45.54,"ArxivClusteringP2P":42.81,"ArxivClusteringS2S":44.24,"BiorxivClusteringP2P":34.27,"BiorxivClusteringS2S":35.53,"MedrxivClusteringP2P":31.07,"MedrxivClusteringS2S":31.27,"RedditClustering":60.24,"RedditClusteringP2P":64.12,"StackExchangeClustering":70.73,"StackExchangeClusteringP2P":34.5,"TwentyNewsgroupsClustering":52.18} -{"index":148,"Rank":60,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":45.29,"ArxivClusteringP2P":43.16,"ArxivClusteringS2S":32.56,"BiorxivClusteringP2P":37.62,"BiorxivClusteringS2S":31.33,"MedrxivClusteringP2P":34.22,"MedrxivClusteringS2S":32.0,"RedditClustering":63.65,"RedditClusteringP2P":64.63,"StackExchangeClustering":68.78,"StackExchangeClusteringP2P":36.15,"TwentyNewsgroupsClustering":54.13} -{"index":60,"Rank":61,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":45.24,"ArxivClusteringP2P":43.14,"ArxivClusteringS2S":42.38,"BiorxivClusteringP2P":35.88,"BiorxivClusteringS2S":34.81,"MedrxivClusteringP2P":32.23,"MedrxivClusteringS2S":31.37,"RedditClustering":61.1,"RedditClusteringP2P":64.52,"StackExchangeClustering":67.98,"StackExchangeClusteringP2P":33.2,"TwentyNewsgroupsClustering":51.04} -{"index":0,"Rank":62,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, 
fp32)":4.47,"Average":45.07,"ArxivClusteringP2P":44.12,"ArxivClusteringS2S":36.54,"BiorxivClusteringP2P":36.28,"BiorxivClusteringS2S":33.09,"MedrxivClusteringP2P":32.08,"MedrxivClusteringS2S":30.84,"RedditClustering":62.24,"RedditClusteringP2P":63.7,"StackExchangeClustering":70.19,"StackExchangeClusteringP2P":36.1,"TwentyNewsgroupsClustering":50.6} -{"index":198,"Rank":63,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.06,"ArxivClusteringP2P":48.5,"ArxivClusteringS2S":42.01,"BiorxivClusteringP2P":39.3,"BiorxivClusteringS2S":35.65,"MedrxivClusteringP2P":32.8,"MedrxivClusteringS2S":30.96,"RedditClustering":55.69,"RedditClusteringP2P":62.33,"StackExchangeClustering":64.81,"StackExchangeClusteringP2P":34.08,"TwentyNewsgroupsClustering":49.5} -{"index":125,"Rank":64,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.95,"ArxivClusteringP2P":47.78,"ArxivClusteringS2S":40.13,"BiorxivClusteringP2P":39.4,"BiorxivClusteringS2S":35.1,"MedrxivClusteringP2P":34.71,"MedrxivClusteringS2S":32.15,"RedditClustering":55.4,"RedditClusteringP2P":61.23,"StackExchangeClustering":62.05,"StackExchangeClusteringP2P":36.65,"TwentyNewsgroupsClustering":49.86} -{"index":119,"Rank":65,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.95,"ArxivClusteringP2P":47.78,"ArxivClusteringS2S":40.13,"BiorxivClusteringP2P":39.4,"BiorxivClusteringS2S":35.1,"MedrxivClusteringP2P":34.71,"MedrxivClusteringS2S":32.15,"RedditClustering":55.4,"RedditClusteringP2P":61.23,"StackExchangeClustering":62.05,"StackExchangeClusteringP2P":36.65,"TwentyNewsgroupsClustering":49.86} -{"index":137,"Rank":66,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.9,"ArxivClusteringP2P":47.24,"ArxivClusteringS2S":40.47,"BiorxivClusteringP2P":39.56,"BiorxivClusteringS2S":35.91,"MedrxivClusteringP2P":33.25,"MedrxivClusteringS2S":31.96,"RedditClustering":55.07,"RedditClusteringP2P":62.12,"StackExchangeClustering":64.47,"StackExchangeClusteringP2P":33.8,"TwentyNewsgroupsClustering":50.08} -{"index":151,"Rank":67,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.9,"ArxivClusteringP2P":47.24,"ArxivClusteringS2S":40.47,"BiorxivClusteringP2P":39.56,"BiorxivClusteringS2S":35.91,"MedrxivClusteringP2P":33.25,"MedrxivClusteringS2S":31.96,"RedditClustering":55.07,"RedditClusteringP2P":62.12,"StackExchangeClustering":64.47,"StackExchangeClusteringP2P":33.8,"TwentyNewsgroupsClustering":50.08} -{"index":254,"Rank":68,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.89,"ArxivClusteringP2P":47.9,"ArxivClusteringS2S":40.26,"BiorxivClusteringP2P":38.37,"BiorxivClusteringS2S":35.49,"MedrxivClusteringP2P":33.78,"MedrxivClusteringS2S":32.07,"RedditClustering":55.62,"RedditClusteringP2P":61.39,"StackExchangeClustering":62.63,"StackExchangeClusteringP2P":36.33,"TwentyNewsgroupsClustering":49.95} -{"index":118,"Rank":69,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":44.82,"ArxivClusteringP2P":47.62,"ArxivClusteringS2S":39.86,"BiorxivClusteringP2P":39.22,"BiorxivClusteringS2S":35.43,"MedrxivClusteringP2P":34.5,"MedrxivClusteringS2S":32.16,"RedditClustering":55.51,"RedditClusteringP2P":60.64,"StackExchangeClustering":61.91,"StackExchangeClusteringP2P":36.16,"TwentyNewsgroupsClustering":50.01} -{"index":149,"Rank":70,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":44.74,"ArxivClusteringP2P":42.45,"ArxivClusteringS2S":32.21,"BiorxivClusteringP2P":37.37,"BiorxivClusteringS2S":30.55,"MedrxivClusteringP2P":33.19,"MedrxivClusteringS2S":30.79,"RedditClustering":63.49,"RedditClusteringP2P":65.13,"StackExchangeClustering":68.44,"StackExchangeClusteringP2P":35.17,"TwentyNewsgroupsClustering":53.38} -{"index":33,"Rank":71,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.64,"ArxivClusteringP2P":46.19,"ArxivClusteringS2S":37.58,"BiorxivClusteringP2P":38.97,"BiorxivClusteringS2S":34.52,"MedrxivClusteringP2P":37.66,"MedrxivClusteringS2S":33.54,"RedditClustering":53.5,"RedditClusteringP2P":63.59,"StackExchangeClustering":62.94,"StackExchangeClusteringP2P":36.48,"TwentyNewsgroupsClustering":46.06} -{"index":93,"Rank":72,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.49,"ArxivClusteringP2P":45.55,"ArxivClusteringS2S":41.02,"BiorxivClusteringP2P":37.36,"BiorxivClusteringS2S":34.85,"MedrxivClusteringP2P":31.82,"MedrxivClusteringS2S":30.38,"RedditClustering":55.83,"RedditClusteringP2P":63.38,"StackExchangeClustering":65.92,"StackExchangeClusteringP2P":33.67,"TwentyNewsgroupsClustering":49.6} -{"index":155,"Rank":73,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":44.49,"ArxivClusteringP2P":45.55,"ArxivClusteringS2S":41.02,"BiorxivClusteringP2P":37.36,"BiorxivClusteringS2S":34.85,"MedrxivClusteringP2P":31.82,"MedrxivClusteringS2S":30.38,"RedditClustering":55.83,"RedditClusteringP2P":63.38,"StackExchangeClustering":65.92,"StackExchangeClusteringP2P":33.67,"TwentyNewsgroupsClustering":49.6} -{"index":213,"Rank":74,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.93,"ArxivClusteringP2P":45.69,"ArxivClusteringS2S":36.35,"BiorxivClusteringP2P":38.77,"BiorxivClusteringS2S":32.94,"MedrxivClusteringP2P":34.53,"MedrxivClusteringS2S":30.94,"RedditClustering":56.52,"RedditClusteringP2P":61.05,"StackExchangeClustering":63.19,"StackExchangeClusteringP2P":34.03,"TwentyNewsgroupsClustering":49.21} -{"index":29,"Rank":75,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"index":206,"Rank":76,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, 
fp32)":2.04,"Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"index":129,"Rank":77,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"index":28,"Rank":78,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"index":26,"Rank":79,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"index":27,"Rank":80,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"index":24,"Rank":81,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":43.82,"ArxivClusteringP2P":47.4,"ArxivClusteringS2S":40.02,"BiorxivClusteringP2P":38.47,"BiorxivClusteringS2S":34.72,"MedrxivClusteringP2P":33.06,"MedrxivClusteringS2S":30.86,"RedditClustering":52.32,"RedditClusteringP2P":60.64,"StackExchangeClustering":60.78,"StackExchangeClusteringP2P":35.27,"TwentyNewsgroupsClustering":48.52} -{"index":153,"Rank":82,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.8,"ArxivClusteringP2P":46.1,"ArxivClusteringS2S":39.67,"BiorxivClusteringP2P":37.5,"BiorxivClusteringS2S":32.72,"MedrxivClusteringP2P":31.46,"MedrxivClusteringS2S":28.99,"RedditClustering":56.16,"RedditClusteringP2P":63.43,"StackExchangeClustering":65.15,"StackExchangeClusteringP2P":32.5,"TwentyNewsgroupsClustering":48.16} -{"index":140,"Rank":83,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":43.8,"ArxivClusteringP2P":46.1,"ArxivClusteringS2S":39.67,"BiorxivClusteringP2P":37.5,"BiorxivClusteringS2S":32.72,"MedrxivClusteringP2P":31.46,"MedrxivClusteringS2S":28.99,"RedditClustering":56.16,"RedditClusteringP2P":63.43,"StackExchangeClustering":65.15,"StackExchangeClusteringP2P":32.5,"TwentyNewsgroupsClustering":48.16} -{"index":246,"Rank":84,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.72,"ArxivClusteringP2P":42.89,"ArxivClusteringS2S":33.47,"BiorxivClusteringP2P":36.53,"BiorxivClusteringS2S":28.66,"MedrxivClusteringP2P":32.09,"MedrxivClusteringS2S":26.82,"RedditClustering":58.99,"RedditClusteringP2P":64.46,"StackExchangeClustering":70.78,"StackExchangeClusteringP2P":35.25,"TwentyNewsgroupsClustering":50.93} -{"index":211,"Rank":85,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":43.71,"ArxivClusteringP2P":45.45,"ArxivClusteringS2S":36.19,"BiorxivClusteringP2P":38.41,"BiorxivClusteringS2S":32.28,"MedrxivClusteringP2P":34.47,"MedrxivClusteringS2S":31.43,"RedditClustering":55.9,"RedditClusteringP2P":60.58,"StackExchangeClustering":62.94,"StackExchangeClusteringP2P":33.81,"TwentyNewsgroupsClustering":49.36} -{"index":207,"Rank":86,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.7,"ArxivClusteringP2P":45.85,"ArxivClusteringS2S":36.13,"BiorxivClusteringP2P":38.46,"BiorxivClusteringS2S":33.14,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":30.55,"RedditClustering":56.82,"RedditClusteringP2P":61.57,"StackExchangeClustering":63.18,"StackExchangeClusteringP2P":33.51,"TwentyNewsgroupsClustering":48.72} -{"index":230,"Rank":87,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.69,"ArxivClusteringP2P":48.38,"ArxivClusteringS2S":39.72,"BiorxivClusteringP2P":39.62,"BiorxivClusteringS2S":35.02,"MedrxivClusteringP2P":35.58,"MedrxivClusteringS2S":32.87,"RedditClustering":54.82,"RedditClusteringP2P":56.77,"StackExchangeClustering":53.8,"StackExchangeClusteringP2P":34.28,"TwentyNewsgroupsClustering":49.74} -{"index":66,"Rank":88,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":43.57,"ArxivClusteringP2P":43.47,"ArxivClusteringS2S":39.85,"BiorxivClusteringP2P":37.1,"BiorxivClusteringS2S":34.28,"MedrxivClusteringP2P":33.55,"MedrxivClusteringS2S":31.11,"RedditClustering":53.02,"RedditClusteringP2P":60.47,"StackExchangeClustering":63.04,"StackExchangeClusteringP2P":34.01,"TwentyNewsgroupsClustering":49.37} -{"index":185,"Rank":89,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.48,"ArxivClusteringP2P":44.44,"ArxivClusteringS2S":34.19,"BiorxivClusteringP2P":37.82,"BiorxivClusteringS2S":32.13,"MedrxivClusteringP2P":33.88,"MedrxivClusteringS2S":32.08,"RedditClustering":54.25,"RedditClusteringP2P":61.28,"StackExchangeClustering":64.7,"StackExchangeClusteringP2P":34.23,"TwentyNewsgroupsClustering":49.31} -{"index":262,"Rank":90,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":43.43,"ArxivClusteringP2P":45.8,"ArxivClusteringS2S":37.64,"BiorxivClusteringP2P":38.69,"BiorxivClusteringS2S":34.45,"MedrxivClusteringP2P":32.86,"MedrxivClusteringS2S":34.02,"RedditClustering":54.76,"RedditClusteringP2P":56.28,"StackExchangeClustering":63.95,"StackExchangeClusteringP2P":32.22,"TwentyNewsgroupsClustering":47.07} -{"index":154,"Rank":91,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":43.33,"ArxivClusteringP2P":46.19,"ArxivClusteringS2S":41.38,"BiorxivClusteringP2P":37.6,"BiorxivClusteringS2S":35.09,"MedrxivClusteringP2P":32.26,"MedrxivClusteringS2S":29.66,"RedditClustering":50.69,"RedditClusteringP2P":61.37,"StackExchangeClustering":64.96,"StackExchangeClusteringP2P":33.6,"TwentyNewsgroupsClustering":43.81} -{"index":210,"Rank":92,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":43.16,"ArxivClusteringP2P":44.82,"ArxivClusteringS2S":35.32,"BiorxivClusteringP2P":38.19,"BiorxivClusteringS2S":31.83,"MedrxivClusteringP2P":34.08,"MedrxivClusteringS2S":30.98,"RedditClustering":54.92,"RedditClusteringP2P":60.23,"StackExchangeClustering":61.81,"StackExchangeClusteringP2P":34.03,"TwentyNewsgroupsClustering":48.56} -{"index":184,"Rank":93,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.97,"ArxivClusteringP2P":45.54,"ArxivClusteringS2S":38.43,"BiorxivClusteringP2P":39.3,"BiorxivClusteringS2S":35.11,"MedrxivClusteringP2P":34.8,"MedrxivClusteringS2S":32.78,"RedditClustering":49.24,"RedditClusteringP2P":58.82,"StackExchangeClustering":58.89,"StackExchangeClusteringP2P":33.6,"TwentyNewsgroupsClustering":46.13} -{"index":199,"Rank":94,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.88,"ArxivClusteringP2P":46.5,"ArxivClusteringS2S":40.08,"BiorxivClusteringP2P":36.79,"BiorxivClusteringS2S":34.42,"MedrxivClusteringP2P":32.0,"MedrxivClusteringS2S":30.38,"RedditClustering":51.0,"RedditClusteringP2P":59.28,"StackExchangeClustering":60.92,"StackExchangeClusteringP2P":32.98,"TwentyNewsgroupsClustering":47.3} -{"index":202,"Rank":95,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.63,"ArxivClusteringP2P":46.92,"ArxivClusteringS2S":39.12,"BiorxivClusteringP2P":37.43,"BiorxivClusteringS2S":33.89,"MedrxivClusteringP2P":31.9,"MedrxivClusteringS2S":30.71,"RedditClustering":50.5,"RedditClusteringP2P":60.3,"StackExchangeClustering":59.26,"StackExchangeClusteringP2P":33.76,"TwentyNewsgroupsClustering":45.09} -{"index":208,"Rank":96,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.52,"ArxivClusteringP2P":45.18,"ArxivClusteringS2S":35.91,"BiorxivClusteringP2P":36.57,"BiorxivClusteringS2S":32.05,"MedrxivClusteringP2P":31.35,"MedrxivClusteringS2S":29.56,"RedditClustering":55.37,"RedditClusteringP2P":60.05,"StackExchangeClustering":62.28,"StackExchangeClusteringP2P":33.94,"TwentyNewsgroupsClustering":45.43} -{"index":238,"Rank":97,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, 
fp32)":18.12,"Average":42.42,"ArxivClusteringP2P":37.9,"ArxivClusteringS2S":32.39,"BiorxivClusteringP2P":30.48,"BiorxivClusteringS2S":27.5,"MedrxivClusteringP2P":29.12,"MedrxivClusteringS2S":27.56,"RedditClustering":64.13,"RedditClusteringP2P":62.84,"StackExchangeClustering":71.43,"StackExchangeClusteringP2P":32.85,"TwentyNewsgroupsClustering":50.44} -{"index":229,"Rank":98,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":42.35,"ArxivClusteringP2P":46.55,"ArxivClusteringS2S":37.86,"BiorxivClusteringP2P":38.48,"BiorxivClusteringS2S":33.17,"MedrxivClusteringP2P":34.41,"MedrxivClusteringS2S":32.29,"RedditClustering":50.67,"RedditClusteringP2P":54.15,"StackExchangeClustering":53.36,"StackExchangeClusteringP2P":38.0,"TwentyNewsgroupsClustering":46.86} -{"index":245,"Rank":99,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":42.34,"ArxivClusteringP2P":41.62,"ArxivClusteringS2S":31.17,"BiorxivClusteringP2P":36.43,"BiorxivClusteringS2S":26.47,"MedrxivClusteringP2P":32.3,"MedrxivClusteringS2S":26.93,"RedditClustering":57.03,"RedditClusteringP2P":62.34,"StackExchangeClustering":67.13,"StackExchangeClusteringP2P":34.79,"TwentyNewsgroupsClustering":49.53} -{"index":209,"Rank":100,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":42.24,"ArxivClusteringP2P":43.87,"ArxivClusteringS2S":34.57,"BiorxivClusteringP2P":36.79,"BiorxivClusteringS2S":30.68,"MedrxivClusteringP2P":34.09,"MedrxivClusteringS2S":31.3,"RedditClustering":53.31,"RedditClusteringP2P":58.96,"StackExchangeClustering":59.92,"StackExchangeClusteringP2P":33.88,"TwentyNewsgroupsClustering":47.29} -{"index":152,"Rank":101,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":42.11,"ArxivClusteringP2P":44.57,"ArxivClusteringS2S":40.48,"BiorxivClusteringP2P":36.19,"BiorxivClusteringS2S":32.72,"MedrxivClusteringP2P":31.53,"MedrxivClusteringS2S":28.29,"RedditClustering":48.18,"RedditClusteringP2P":62.19,"StackExchangeClustering":63.91,"StackExchangeClusteringP2P":32.56,"TwentyNewsgroupsClustering":42.58} -{"index":107,"Rank":102,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.09,"ArxivClusteringP2P":46.64,"ArxivClusteringS2S":36.01,"BiorxivClusteringP2P":39.19,"BiorxivClusteringS2S":32.13,"MedrxivClusteringP2P":34.11,"MedrxivClusteringS2S":31.67,"RedditClustering":50.19,"RedditClusteringP2P":56.49,"StackExchangeClustering":57.54,"StackExchangeClusteringP2P":35.74,"TwentyNewsgroupsClustering":43.27} -{"index":63,"Rank":103,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":41.99,"ArxivClusteringP2P":49.22,"ArxivClusteringS2S":41.71,"BiorxivClusteringP2P":38.39,"BiorxivClusteringS2S":31.31,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":27.87,"RedditClustering":43.67,"RedditClusteringP2P":61.67,"StackExchangeClustering":68.2,"StackExchangeClusteringP2P":36.36,"TwentyNewsgroupsClustering":32.01} -{"index":35,"Rank":104,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":41.98,"ArxivClusteringP2P":44.21,"ArxivClusteringS2S":33.91,"BiorxivClusteringP2P":38.68,"BiorxivClusteringS2S":32.21,"MedrxivClusteringP2P":35.18,"MedrxivClusteringS2S":30.99,"RedditClustering":49.5,"RedditClusteringP2P":60.21,"StackExchangeClustering":60.85,"StackExchangeClusteringP2P":33.97,"TwentyNewsgroupsClustering":42.09} -{"index":147,"Rank":105,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.9,"ArxivClusteringP2P":39.68,"ArxivClusteringS2S":29.19,"BiorxivClusteringP2P":32.98,"BiorxivClusteringS2S":25.72,"MedrxivClusteringP2P":30.89,"MedrxivClusteringS2S":28.38,"RedditClustering":59.26,"RedditClusteringP2P":63.22,"StackExchangeClustering":65.04,"StackExchangeClusteringP2P":35.28,"TwentyNewsgroupsClustering":51.31} -{"index":100,"Rank":106,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.87,"ArxivClusteringP2P":47.24,"ArxivClusteringS2S":37.43,"BiorxivClusteringP2P":36.97,"BiorxivClusteringS2S":31.98,"MedrxivClusteringP2P":31.5,"MedrxivClusteringS2S":28.41,"RedditClustering":47.53,"RedditClusteringP2P":62.76,"StackExchangeClustering":59.55,"StackExchangeClusteringP2P":39.43,"TwentyNewsgroupsClustering":37.73} -{"index":228,"Rank":107,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":41.81,"ArxivClusteringP2P":46.07,"ArxivClusteringS2S":37.5,"BiorxivClusteringP2P":36.99,"BiorxivClusteringS2S":33.21,"MedrxivClusteringP2P":34.25,"MedrxivClusteringS2S":32.24,"RedditClustering":51.18,"RedditClusteringP2P":54.8,"StackExchangeClustering":53.05,"StackExchangeClusteringP2P":33.13,"TwentyNewsgroupsClustering":47.47} -{"index":175,"Rank":108,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":41.73,"ArxivClusteringP2P":45.39,"ArxivClusteringS2S":36.68,"BiorxivClusteringP2P":37.05,"BiorxivClusteringS2S":30.16,"MedrxivClusteringP2P":32.41,"MedrxivClusteringS2S":28.09,"RedditClustering":53.05,"RedditClusteringP2P":60.31,"StackExchangeClustering":58.52,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":42.46} -{"index":135,"Rank":109,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.73,"ArxivClusteringP2P":45.39,"ArxivClusteringS2S":36.68,"BiorxivClusteringP2P":37.05,"BiorxivClusteringS2S":30.16,"MedrxivClusteringP2P":32.41,"MedrxivClusteringS2S":28.09,"RedditClustering":53.05,"RedditClusteringP2P":60.31,"StackExchangeClustering":58.52,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":42.46} -{"index":244,"Rank":110,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":41.65,"ArxivClusteringP2P":41.62,"ArxivClusteringS2S":29.44,"BiorxivClusteringP2P":35.99,"BiorxivClusteringS2S":24.02,"MedrxivClusteringP2P":32.4,"MedrxivClusteringS2S":26.33,"RedditClustering":54.53,"RedditClusteringP2P":62.5,"StackExchangeClustering":65.11,"StackExchangeClusteringP2P":36.86,"TwentyNewsgroupsClustering":49.33} -{"index":236,"Rank":111,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":41.6,"ArxivClusteringP2P":37.5,"ArxivClusteringS2S":30.55,"BiorxivClusteringP2P":29.59,"BiorxivClusteringS2S":25.72,"MedrxivClusteringP2P":28.72,"MedrxivClusteringS2S":27.39,"RedditClustering":61.69,"RedditClusteringP2P":61.67,"StackExchangeClustering":69.93,"StackExchangeClusteringP2P":33.21,"TwentyNewsgroupsClustering":51.64} -{"index":101,"Rank":112,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.51,"ArxivClusteringP2P":45.56,"ArxivClusteringS2S":35.61,"BiorxivClusteringP2P":36.16,"BiorxivClusteringS2S":30.14,"MedrxivClusteringP2P":31.12,"MedrxivClusteringS2S":26.73,"RedditClustering":50.24,"RedditClusteringP2P":61.45,"StackExchangeClustering":62.63,"StackExchangeClusteringP2P":37.17,"TwentyNewsgroupsClustering":39.83} -{"index":237,"Rank":113,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":41.51,"ArxivClusteringP2P":37.9,"ArxivClusteringS2S":30.45,"BiorxivClusteringP2P":30.52,"BiorxivClusteringS2S":26.06,"MedrxivClusteringP2P":28.69,"MedrxivClusteringS2S":26.69,"RedditClustering":61.34,"RedditClusteringP2P":61.11,"StackExchangeClustering":69.95,"StackExchangeClusteringP2P":32.73,"TwentyNewsgroupsClustering":51.15} -{"index":99,"Rank":114,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.49,"ArxivClusteringP2P":47.46,"ArxivClusteringS2S":38.29,"BiorxivClusteringP2P":37.54,"BiorxivClusteringS2S":32.65,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":28.94,"RedditClustering":46.02,"RedditClusteringP2P":62.43,"StackExchangeClustering":57.86,"StackExchangeClusteringP2P":37.82,"TwentyNewsgroupsClustering":35.91} -{"index":121,"Rank":115,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.49,"ArxivClusteringP2P":47.46,"ArxivClusteringS2S":38.29,"BiorxivClusteringP2P":37.54,"BiorxivClusteringS2S":32.65,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":28.94,"RedditClustering":46.02,"RedditClusteringP2P":62.43,"StackExchangeClustering":57.86,"StackExchangeClusteringP2P":37.82,"TwentyNewsgroupsClustering":35.91} -{"index":214,"Rank":116,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.1,"ArxivClusteringP2P":42.61,"ArxivClusteringS2S":32.32,"BiorxivClusteringP2P":34.97,"BiorxivClusteringS2S":29.08,"MedrxivClusteringP2P":31.19,"MedrxivClusteringS2S":27.27,"RedditClustering":54.89,"RedditClusteringP2P":57.58,"StackExchangeClustering":63.15,"StackExchangeClusteringP2P":32.25,"TwentyNewsgroupsClustering":46.82} -{"index":160,"Rank":117,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":41.06,"ArxivClusteringP2P":44.31,"ArxivClusteringS2S":38.43,"BiorxivClusteringP2P":35.34,"BiorxivClusteringS2S":33.5,"MedrxivClusteringP2P":31.48,"MedrxivClusteringS2S":29.71,"RedditClustering":46.54,"RedditClusteringP2P":63.22,"StackExchangeClustering":57.53,"StackExchangeClusteringP2P":32.69,"TwentyNewsgroupsClustering":38.91} -{"index":44,"Rank":118,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":40.9,"ArxivClusteringP2P":42.32,"ArxivClusteringS2S":31.28,"BiorxivClusteringP2P":37.42,"BiorxivClusteringS2S":29.32,"MedrxivClusteringP2P":34.68,"MedrxivClusteringS2S":30.34,"RedditClustering":50.76,"RedditClusteringP2P":55.02,"StackExchangeClustering":59.66,"StackExchangeClusteringP2P":32.11,"TwentyNewsgroupsClustering":47.01} -{"index":103,"Rank":119,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.88,"ArxivClusteringP2P":44.94,"ArxivClusteringS2S":35.87,"BiorxivClusteringP2P":35.68,"BiorxivClusteringS2S":30.47,"MedrxivClusteringP2P":30.79,"MedrxivClusteringS2S":27.95,"RedditClustering":50.47,"RedditClusteringP2P":60.54,"StackExchangeClustering":60.7,"StackExchangeClusteringP2P":33.98,"TwentyNewsgroupsClustering":38.28} -{"index":183,"Rank":120,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.86,"ArxivClusteringP2P":47.03,"ArxivClusteringS2S":38.38,"BiorxivClusteringP2P":38.98,"BiorxivClusteringS2S":34.94,"MedrxivClusteringP2P":33.98,"MedrxivClusteringS2S":31.67,"RedditClustering":42.72,"RedditClusteringP2P":56.93,"StackExchangeClustering":53.6,"StackExchangeClusteringP2P":32.87,"TwentyNewsgroupsClustering":38.38} -{"index":61,"Rank":121,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":40.83,"ArxivClusteringP2P":47.81,"ArxivClusteringS2S":40.53,"BiorxivClusteringP2P":38.12,"BiorxivClusteringS2S":31.25,"MedrxivClusteringP2P":30.94,"MedrxivClusteringS2S":28.04,"RedditClustering":42.84,"RedditClusteringP2P":60.1,"StackExchangeClustering":65.12,"StackExchangeClusteringP2P":33.61,"TwentyNewsgroupsClustering":30.76} -{"index":132,"Rank":122,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.83,"ArxivClusteringP2P":42.9,"ArxivClusteringS2S":32.45,"BiorxivClusteringP2P":37.24,"BiorxivClusteringS2S":29.75,"MedrxivClusteringP2P":36.65,"MedrxivClusteringS2S":32.57,"RedditClustering":50.62,"RedditClusteringP2P":54.44,"StackExchangeClustering":54.26,"StackExchangeClusteringP2P":32.52,"TwentyNewsgroupsClustering":45.7} -{"index":169,"Rank":123,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.81,"ArxivClusteringP2P":45.66,"ArxivClusteringS2S":38.42,"BiorxivClusteringP2P":35.54,"BiorxivClusteringS2S":32.19,"MedrxivClusteringP2P":30.95,"MedrxivClusteringS2S":28.87,"RedditClustering":47.9,"RedditClusteringP2P":55.95,"StackExchangeClustering":60.27,"StackExchangeClusteringP2P":32.34,"TwentyNewsgroupsClustering":40.81} -{"index":167,"Rank":124,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.74,"ArxivClusteringP2P":45.37,"ArxivClusteringS2S":36.52,"BiorxivClusteringP2P":36.52,"BiorxivClusteringS2S":31.98,"MedrxivClusteringP2P":31.71,"MedrxivClusteringS2S":28.8,"RedditClustering":49.71,"RedditClusteringP2P":55.07,"StackExchangeClustering":59.42,"StackExchangeClusteringP2P":31.7,"TwentyNewsgroupsClustering":41.29} -{"index":69,"Rank":125,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":40.7,"ArxivClusteringP2P":46.39,"ArxivClusteringS2S":35.41,"BiorxivClusteringP2P":38.98,"BiorxivClusteringS2S":31.74,"MedrxivClusteringP2P":33.13,"MedrxivClusteringS2S":30.29,"RedditClustering":47.53,"RedditClusteringP2P":56.03,"StackExchangeClustering":53.87,"StackExchangeClusteringP2P":33.57,"TwentyNewsgroupsClustering":40.77} -{"index":65,"Rank":126,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":40.63,"ArxivClusteringP2P":47.56,"ArxivClusteringS2S":39.92,"BiorxivClusteringP2P":36.14,"BiorxivClusteringS2S":30.26,"MedrxivClusteringP2P":30.11,"MedrxivClusteringS2S":26.93,"RedditClustering":41.83,"RedditClusteringP2P":62.08,"StackExchangeClustering":67.34,"StackExchangeClusteringP2P":34.5,"TwentyNewsgroupsClustering":30.26} -{"index":267,"Rank":127,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.54,"ArxivClusteringP2P":43.24,"ArxivClusteringS2S":36.49,"BiorxivClusteringP2P":35.56,"BiorxivClusteringS2S":31.02,"MedrxivClusteringP2P":31.7,"MedrxivClusteringS2S":27.76,"RedditClustering":49.16,"RedditClusteringP2P":61.55,"StackExchangeClustering":56.77,"StackExchangeClusteringP2P":32.04,"TwentyNewsgroupsClustering":40.7} -{"index":112,"Rank":128,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.4,"ArxivClusteringP2P":41.78,"ArxivClusteringS2S":34.13,"BiorxivClusteringP2P":35.01,"BiorxivClusteringS2S":31.0,"MedrxivClusteringP2P":29.71,"MedrxivClusteringS2S":28.0,"RedditClustering":49.53,"RedditClusteringP2P":59.71,"StackExchangeClustering":60.73,"StackExchangeClusteringP2P":34.64,"TwentyNewsgroupsClustering":40.12} -{"index":83,"Rank":129,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.34,"ArxivClusteringP2P":45.59,"ArxivClusteringS2S":38.86,"BiorxivClusteringP2P":36.55,"BiorxivClusteringS2S":33.7,"MedrxivClusteringP2P":31.51,"MedrxivClusteringS2S":28.76,"RedditClustering":40.45,"RedditClusteringP2P":55.75,"StackExchangeClustering":59.21,"StackExchangeClusteringP2P":33.95,"TwentyNewsgroupsClustering":39.46} -{"index":212,"Rank":130,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":40.31,"ArxivClusteringP2P":41.8,"ArxivClusteringS2S":32.41,"BiorxivClusteringP2P":34.81,"BiorxivClusteringS2S":28.59,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":29.91,"RedditClustering":50.31,"RedditClusteringP2P":56.57,"StackExchangeClustering":57.99,"StackExchangeClusteringP2P":33.64,"TwentyNewsgroupsClustering":44.61} -{"index":243,"Rank":131,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.21,"ArxivClusteringP2P":39.28,"ArxivClusteringS2S":27.26,"BiorxivClusteringP2P":33.99,"BiorxivClusteringS2S":22.92,"MedrxivClusteringP2P":33.2,"MedrxivClusteringS2S":26.13,"RedditClustering":52.93,"RedditClusteringP2P":59.67,"StackExchangeClustering":63.13,"StackExchangeClusteringP2P":35.68,"TwentyNewsgroupsClustering":48.1} -{"index":136,"Rank":132,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":40.08,"ArxivClusteringP2P":44.02,"ArxivClusteringS2S":35.16,"BiorxivClusteringP2P":35.57,"BiorxivClusteringS2S":29.07,"MedrxivClusteringP2P":31.86,"MedrxivClusteringS2S":27.51,"RedditClustering":49.28,"RedditClusteringP2P":57.09,"StackExchangeClustering":55.35,"StackExchangeClusteringP2P":34.42,"TwentyNewsgroupsClustering":41.57} -{"index":177,"Rank":133,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.08,"ArxivClusteringP2P":44.02,"ArxivClusteringS2S":35.16,"BiorxivClusteringP2P":35.57,"BiorxivClusteringS2S":29.07,"MedrxivClusteringP2P":31.86,"MedrxivClusteringS2S":27.51,"RedditClustering":49.28,"RedditClusteringP2P":57.09,"StackExchangeClustering":55.35,"StackExchangeClusteringP2P":34.42,"TwentyNewsgroupsClustering":41.57} -{"index":192,"Rank":134,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.94,"ArxivClusteringP2P":46.19,"ArxivClusteringS2S":36.91,"BiorxivClusteringP2P":38.59,"BiorxivClusteringS2S":32.32,"MedrxivClusteringP2P":32.55,"MedrxivClusteringS2S":29.92,"RedditClustering":45.63,"RedditClusteringP2P":53.37,"StackExchangeClustering":47.28,"StackExchangeClusteringP2P":34.25,"TwentyNewsgroupsClustering":42.38} -{"index":158,"Rank":135,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.92,"ArxivClusteringP2P":42.12,"ArxivClusteringS2S":34.8,"BiorxivClusteringP2P":35.89,"BiorxivClusteringS2S":30.05,"MedrxivClusteringP2P":31.34,"MedrxivClusteringS2S":27.88,"RedditClustering":45.72,"RedditClusteringP2P":59.66,"StackExchangeClustering":58.51,"StackExchangeClusteringP2P":31.98,"TwentyNewsgroupsClustering":41.13} -{"index":79,"Rank":136,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.92,"ArxivClusteringP2P":43.38,"ArxivClusteringS2S":33.71,"BiorxivClusteringP2P":35.06,"BiorxivClusteringS2S":30.71,"MedrxivClusteringP2P":32.08,"MedrxivClusteringS2S":29.45,"RedditClustering":48.23,"RedditClusteringP2P":53.18,"StackExchangeClustering":60.86,"StackExchangeClusteringP2P":32.36,"TwentyNewsgroupsClustering":40.06} -{"index":82,"Rank":137,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.83,"ArxivClusteringP2P":44.72,"ArxivClusteringS2S":35.08,"BiorxivClusteringP2P":34.41,"BiorxivClusteringS2S":30.53,"MedrxivClusteringP2P":31.35,"MedrxivClusteringS2S":28.77,"RedditClustering":46.47,"RedditClusteringP2P":54.17,"StackExchangeClustering":59.19,"StackExchangeClusteringP2P":32.57,"TwentyNewsgroupsClustering":40.89} -{"index":76,"Rank":138,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.54,"ArxivClusteringP2P":42.86,"ArxivClusteringS2S":32.48,"BiorxivClusteringP2P":36.83,"BiorxivClusteringS2S":29.24,"MedrxivClusteringP2P":32.55,"MedrxivClusteringS2S":30.8,"RedditClustering":48.28,"RedditClusteringP2P":53.56,"StackExchangeClustering":55.07,"StackExchangeClusteringP2P":31.92,"TwentyNewsgroupsClustering":41.37} -{"index":157,"Rank":139,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, 
fp32)":0.12,"Average":39.51,"ArxivClusteringP2P":44.14,"ArxivClusteringS2S":37.14,"BiorxivClusteringP2P":35.81,"BiorxivClusteringS2S":31.86,"MedrxivClusteringP2P":31.34,"MedrxivClusteringS2S":28.2,"RedditClustering":42.87,"RedditClusteringP2P":56.39,"StackExchangeClustering":59.08,"StackExchangeClusteringP2P":30.3,"TwentyNewsgroupsClustering":37.51} -{"index":116,"Rank":140,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.48,"ArxivClusteringP2P":45.31,"ArxivClusteringS2S":35.45,"BiorxivClusteringP2P":38.38,"BiorxivClusteringS2S":31.09,"MedrxivClusteringP2P":33.35,"MedrxivClusteringS2S":29.79,"RedditClustering":44.12,"RedditClusteringP2P":48.68,"StackExchangeClustering":53.11,"StackExchangeClusteringP2P":33.91,"TwentyNewsgroupsClustering":41.13} -{"index":105,"Rank":141,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.46,"ArxivClusteringP2P":44.6,"ArxivClusteringS2S":34.48,"BiorxivClusteringP2P":37.13,"BiorxivClusteringS2S":29.72,"MedrxivClusteringP2P":31.98,"MedrxivClusteringS2S":29.26,"RedditClustering":45.27,"RedditClusteringP2P":54.47,"StackExchangeClustering":53.11,"StackExchangeClusteringP2P":34.67,"TwentyNewsgroupsClustering":39.37} -{"index":176,"Rank":142,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.29,"ArxivClusteringP2P":41.55,"ArxivClusteringS2S":32.19,"BiorxivClusteringP2P":35.17,"BiorxivClusteringS2S":28.89,"MedrxivClusteringP2P":32.19,"MedrxivClusteringS2S":28.78,"RedditClustering":48.28,"RedditClusteringP2P":56.03,"StackExchangeClustering":55.39,"StackExchangeClusteringP2P":33.57,"TwentyNewsgroupsClustering":40.2} -{"index":106,"Rank":143,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.18,"ArxivClusteringP2P":44.53,"ArxivClusteringS2S":33.25,"BiorxivClusteringP2P":36.11,"BiorxivClusteringS2S":28.06,"MedrxivClusteringP2P":31.56,"MedrxivClusteringS2S":28.13,"RedditClustering":45.46,"RedditClusteringP2P":55.16,"StackExchangeClustering":54.51,"StackExchangeClusteringP2P":34.07,"TwentyNewsgroupsClustering":40.19} -{"index":166,"Rank":144,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.11,"ArxivClusteringP2P":44.52,"ArxivClusteringS2S":34.45,"BiorxivClusteringP2P":35.25,"BiorxivClusteringS2S":30.71,"MedrxivClusteringP2P":29.43,"MedrxivClusteringS2S":28.87,"RedditClustering":44.52,"RedditClusteringP2P":53.61,"StackExchangeClustering":57.11,"StackExchangeClusteringP2P":31.91,"TwentyNewsgroupsClustering":39.8} -{"index":174,"Rank":145,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.99,"ArxivClusteringP2P":41.43,"ArxivClusteringS2S":32.09,"BiorxivClusteringP2P":34.97,"BiorxivClusteringS2S":28.78,"MedrxivClusteringP2P":31.33,"MedrxivClusteringS2S":28.76,"RedditClustering":47.75,"RedditClusteringP2P":54.88,"StackExchangeClustering":55.38,"StackExchangeClusteringP2P":33.08,"TwentyNewsgroupsClustering":40.48} -{"index":123,"Rank":146,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":38.93,"ArxivClusteringP2P":44.59,"ArxivClusteringS2S":38.03,"BiorxivClusteringP2P":36.03,"BiorxivClusteringS2S":32.48,"MedrxivClusteringP2P":31.05,"MedrxivClusteringS2S":29.26,"RedditClustering":35.53,"RedditClusteringP2P":54.52,"StackExchangeClustering":55.13,"StackExchangeClusteringP2P":34.31,"TwentyNewsgroupsClustering":37.28} -{"index":104,"Rank":147,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.86,"ArxivClusteringP2P":43.5,"ArxivClusteringS2S":32.09,"BiorxivClusteringP2P":34.48,"BiorxivClusteringS2S":26.14,"MedrxivClusteringP2P":30.57,"MedrxivClusteringS2S":26.22,"RedditClustering":48.32,"RedditClusteringP2P":57.84,"StackExchangeClustering":57.49,"StackExchangeClusteringP2P":34.58,"TwentyNewsgroupsClustering":36.28} -{"index":235,"Rank":148,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":38.63,"ArxivClusteringP2P":35.49,"ArxivClusteringS2S":27.18,"BiorxivClusteringP2P":27.66,"BiorxivClusteringS2S":23.25,"MedrxivClusteringP2P":27.57,"MedrxivClusteringS2S":25.13,"RedditClustering":56.13,"RedditClusteringP2P":58.53,"StackExchangeClustering":64.21,"StackExchangeClusteringP2P":33.01,"TwentyNewsgroupsClustering":46.72} -{"index":242,"Rank":149,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":38.4,"ArxivClusteringP2P":37.78,"ArxivClusteringS2S":31.68,"BiorxivClusteringP2P":33.09,"BiorxivClusteringS2S":29.6,"MedrxivClusteringP2P":31.96,"MedrxivClusteringS2S":31.7,"RedditClustering":45.24,"RedditClusteringP2P":51.31,"StackExchangeClustering":52.98,"StackExchangeClusteringP2P":32.94,"TwentyNewsgroupsClustering":44.1} -{"index":203,"Rank":150,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.37,"ArxivClusteringP2P":43.11,"ArxivClusteringS2S":34.41,"BiorxivClusteringP2P":34.7,"BiorxivClusteringS2S":28.84,"MedrxivClusteringP2P":30.49,"MedrxivClusteringS2S":28.8,"RedditClustering":42.86,"RedditClusteringP2P":54.27,"StackExchangeClustering":53.09,"StackExchangeClusteringP2P":33.15,"TwentyNewsgroupsClustering":38.35} -{"index":113,"Rank":151,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.03,"ArxivClusteringP2P":43.85,"ArxivClusteringS2S":29.59,"BiorxivClusteringP2P":37.8,"BiorxivClusteringS2S":26.76,"MedrxivClusteringP2P":32.54,"MedrxivClusteringS2S":28.05,"RedditClustering":43.68,"RedditClusteringP2P":54.33,"StackExchangeClustering":51.77,"StackExchangeClusteringP2P":33.31,"TwentyNewsgroupsClustering":36.65} -{"index":128,"Rank":152,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.92,"ArxivClusteringP2P":42.01,"ArxivClusteringS2S":31.41,"BiorxivClusteringP2P":34.73,"BiorxivClusteringS2S":29.28,"MedrxivClusteringP2P":31.77,"MedrxivClusteringS2S":30.94,"RedditClustering":43.09,"RedditClusteringP2P":56.54,"StackExchangeClustering":48.23,"StackExchangeClusteringP2P":34.76,"TwentyNewsgroupsClustering":34.36} -{"index":159,"Rank":153,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":37.89,"ArxivClusteringP2P":40.28,"ArxivClusteringS2S":35.42,"BiorxivClusteringP2P":35.04,"BiorxivClusteringS2S":29.46,"MedrxivClusteringP2P":28.92,"MedrxivClusteringS2S":28.43,"RedditClustering":42.41,"RedditClusteringP2P":55.17,"StackExchangeClustering":55.27,"StackExchangeClusteringP2P":30.46,"TwentyNewsgroupsClustering":35.97} -{"index":258,"Rank":154,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.82,"ArxivClusteringP2P":34.72,"ArxivClusteringS2S":25.27,"BiorxivClusteringP2P":28.39,"BiorxivClusteringS2S":20.52,"MedrxivClusteringP2P":30.27,"MedrxivClusteringS2S":24.58,"RedditClustering":56.93,"RedditClusteringP2P":58.95,"StackExchangeClustering":60.85,"StackExchangeClusteringP2P":33.14,"TwentyNewsgroupsClustering":42.43} -{"index":239,"Rank":155,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.64,"ArxivClusteringP2P":36.94,"ArxivClusteringS2S":29.03,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":28.16,"MedrxivClusteringP2P":30.23,"MedrxivClusteringS2S":27.01,"RedditClustering":48.04,"RedditClusteringP2P":53.53,"StackExchangeClustering":59.54,"StackExchangeClusteringP2P":30.48,"TwentyNewsgroupsClustering":38.68} -{"index":68,"Rank":156,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.6,"ArxivClusteringP2P":40.28,"ArxivClusteringS2S":31.06,"BiorxivClusteringP2P":35.18,"BiorxivClusteringS2S":28.18,"MedrxivClusteringP2P":30.4,"MedrxivClusteringS2S":28.79,"RedditClustering":45.93,"RedditClusteringP2P":51.44,"StackExchangeClustering":52.18,"StackExchangeClusteringP2P":30.67,"TwentyNewsgroupsClustering":39.44} -{"index":277,"Rank":157,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.52,"ArxivClusteringP2P":41.49,"ArxivClusteringS2S":28.47,"BiorxivClusteringP2P":36.86,"BiorxivClusteringS2S":27.55,"MedrxivClusteringP2P":31.09,"MedrxivClusteringS2S":26.5,"RedditClustering":42.47,"RedditClusteringP2P":58.1,"StackExchangeClustering":53.52,"StackExchangeClusteringP2P":30.43,"TwentyNewsgroupsClustering":36.26} -{"index":67,"Rank":158,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":37.45,"ArxivClusteringP2P":42.92,"ArxivClusteringS2S":35.2,"BiorxivClusteringP2P":35.02,"BiorxivClusteringS2S":27.21,"MedrxivClusteringP2P":30.15,"MedrxivClusteringS2S":26.96,"RedditClustering":38.67,"RedditClusteringP2P":53.42,"StackExchangeClustering":59.35,"StackExchangeClusteringP2P":31.47,"TwentyNewsgroupsClustering":31.54} -{"index":172,"Rank":159,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.15,"ArxivClusteringP2P":40.51,"ArxivClusteringS2S":31.58,"BiorxivClusteringP2P":33.56,"BiorxivClusteringS2S":28.44,"MedrxivClusteringP2P":30.12,"MedrxivClusteringS2S":25.26,"RedditClustering":45.05,"RedditClusteringP2P":55.14,"StackExchangeClustering":45.24,"StackExchangeClusteringP2P":33.37,"TwentyNewsgroupsClustering":40.42} -{"index":241,"Rank":160,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":37.14,"ArxivClusteringP2P":38.33,"ArxivClusteringS2S":31.55,"BiorxivClusteringP2P":33.49,"BiorxivClusteringS2S":29.44,"MedrxivClusteringP2P":31.52,"MedrxivClusteringS2S":30.87,"RedditClustering":42.02,"RedditClusteringP2P":50.73,"StackExchangeClustering":49.6,"StackExchangeClusteringP2P":31.69,"TwentyNewsgroupsClustering":39.28} -{"index":162,"Rank":161,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.08,"ArxivClusteringP2P":39.22,"ArxivClusteringS2S":30.8,"BiorxivClusteringP2P":35.75,"BiorxivClusteringS2S":27.05,"MedrxivClusteringP2P":30.9,"MedrxivClusteringS2S":27.26,"RedditClustering":39.13,"RedditClusteringP2P":58.98,"StackExchangeClustering":53.52,"StackExchangeClusteringP2P":32.07,"TwentyNewsgroupsClustering":33.22} -{"index":84,"Rank":162,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.98,"ArxivClusteringP2P":40.55,"ArxivClusteringS2S":32.49,"BiorxivClusteringP2P":33.59,"BiorxivClusteringS2S":29.13,"MedrxivClusteringP2P":30.33,"MedrxivClusteringS2S":28.02,"RedditClustering":42.17,"RedditClusteringP2P":48.02,"StackExchangeClustering":54.13,"StackExchangeClusteringP2P":31.12,"TwentyNewsgroupsClustering":37.2} -{"index":168,"Rank":163,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.89,"ArxivClusteringP2P":40.27,"ArxivClusteringS2S":32.0,"BiorxivClusteringP2P":33.27,"BiorxivClusteringS2S":28.65,"MedrxivClusteringP2P":27.85,"MedrxivClusteringS2S":27.71,"RedditClustering":41.34,"RedditClusteringP2P":51.2,"StackExchangeClustering":54.51,"StackExchangeClusteringP2P":31.12,"TwentyNewsgroupsClustering":37.88} -{"index":73,"Rank":164,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.96,"ArxivClusteringP2P":35.24,"ArxivClusteringS2S":31.09,"BiorxivClusteringP2P":30.2,"BiorxivClusteringS2S":27.38,"MedrxivClusteringP2P":27.17,"MedrxivClusteringS2S":27.52,"RedditClustering":45.62,"RedditClusteringP2P":47.87,"StackExchangeClustering":52.62,"StackExchangeClusteringP2P":30.05,"TwentyNewsgroupsClustering":40.79} -{"index":171,"Rank":165,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.88,"ArxivClusteringP2P":39.25,"ArxivClusteringS2S":29.08,"BiorxivClusteringP2P":32.2,"BiorxivClusteringS2S":26.63,"MedrxivClusteringP2P":29.6,"MedrxivClusteringS2S":25.01,"RedditClustering":42.85,"RedditClusteringP2P":52.55,"StackExchangeClustering":48.07,"StackExchangeClusteringP2P":31.36,"TwentyNewsgroupsClustering":38.06} -{"index":80,"Rank":166,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.79,"ArxivClusteringP2P":39.71,"ArxivClusteringS2S":28.24,"BiorxivClusteringP2P":33.63,"BiorxivClusteringS2S":27.04,"MedrxivClusteringP2P":31.37,"MedrxivClusteringS2S":26.87,"RedditClustering":40.23,"RedditClusteringP2P":49.09,"StackExchangeClustering":52.74,"StackExchangeClusteringP2P":32.66,"TwentyNewsgroupsClustering":32.13} -{"index":77,"Rank":167,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":35.78,"ArxivClusteringP2P":41.21,"ArxivClusteringS2S":30.2,"BiorxivClusteringP2P":35.62,"BiorxivClusteringS2S":25.4,"MedrxivClusteringP2P":31.42,"MedrxivClusteringS2S":27.14,"RedditClustering":39.24,"RedditClusteringP2P":52.2,"StackExchangeClustering":46.43,"StackExchangeClusteringP2P":34.31,"TwentyNewsgroupsClustering":30.43} -{"index":257,"Rank":168,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.71,"ArxivClusteringP2P":35.13,"ArxivClusteringS2S":23.46,"BiorxivClusteringP2P":31.17,"BiorxivClusteringS2S":18.81,"MedrxivClusteringP2P":28.88,"MedrxivClusteringS2S":23.31,"RedditClustering":49.72,"RedditClusteringP2P":57.92,"StackExchangeClustering":54.64,"StackExchangeClusteringP2P":31.58,"TwentyNewsgroupsClustering":38.23} -{"index":134,"Rank":169,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":35.67,"ArxivClusteringP2P":37.75,"ArxivClusteringS2S":27.7,"BiorxivClusteringP2P":30.24,"BiorxivClusteringS2S":22.67,"MedrxivClusteringP2P":28.41,"MedrxivClusteringS2S":26.14,"RedditClustering":46.29,"RedditClusteringP2P":50.95,"StackExchangeClustering":48.08,"StackExchangeClusteringP2P":33.35,"TwentyNewsgroupsClustering":40.79} -{"index":260,"Rank":170,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.06,"ArxivClusteringP2P":34.75,"ArxivClusteringS2S":22.62,"BiorxivClusteringP2P":28.75,"BiorxivClusteringS2S":20.14,"MedrxivClusteringP2P":31.2,"MedrxivClusteringS2S":25.8,"RedditClustering":46.17,"RedditClusteringP2P":56.53,"StackExchangeClustering":49.34,"StackExchangeClusteringP2P":33.4,"TwentyNewsgroupsClustering":36.93} -{"index":259,"Rank":171,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.82,"ArxivClusteringP2P":33.72,"ArxivClusteringS2S":23.99,"BiorxivClusteringP2P":30.88,"BiorxivClusteringS2S":21.05,"MedrxivClusteringP2P":28.68,"MedrxivClusteringS2S":24.25,"RedditClustering":43.82,"RedditClusteringP2P":58.37,"StackExchangeClustering":47.83,"StackExchangeClusteringP2P":33.01,"TwentyNewsgroupsClustering":37.47} -{"index":231,"Rank":172,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":34.06,"ArxivClusteringP2P":44.75,"ArxivClusteringS2S":35.27,"BiorxivClusteringP2P":39.52,"BiorxivClusteringS2S":34.53,"MedrxivClusteringP2P":35.04,"MedrxivClusteringS2S":31.66,"RedditClustering":24.13,"RedditClusteringP2P":35.06,"StackExchangeClustering":39.01,"StackExchangeClusteringP2P":31.46,"TwentyNewsgroupsClustering":24.22} -{"index":251,"Rank":173,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.47,"ArxivClusteringP2P":35.33,"ArxivClusteringS2S":27.66,"BiorxivClusteringP2P":30.95,"BiorxivClusteringS2S":24.81,"MedrxivClusteringP2P":27.35,"MedrxivClusteringS2S":25.59,"RedditClustering":40.32,"RedditClusteringP2P":45.99,"StackExchangeClustering":48.26,"StackExchangeClusteringP2P":28.87,"TwentyNewsgroupsClustering":33.07} -{"index":217,"Rank":174,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":33.43,"ArxivClusteringP2P":35.18,"ArxivClusteringS2S":27.54,"BiorxivClusteringP2P":30.15,"BiorxivClusteringS2S":24.67,"MedrxivClusteringP2P":26.25,"MedrxivClusteringS2S":24.12,"RedditClustering":40.23,"RedditClusteringP2P":47.74,"StackExchangeClustering":47.55,"StackExchangeClusteringP2P":29.45,"TwentyNewsgroupsClustering":34.86} -{"index":78,"Rank":175,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.15,"ArxivClusteringP2P":37.86,"ArxivClusteringS2S":28.84,"BiorxivClusteringP2P":32.09,"BiorxivClusteringS2S":23.55,"MedrxivClusteringP2P":28.54,"MedrxivClusteringS2S":24.73,"RedditClustering":35.54,"RedditClusteringP2P":47.43,"StackExchangeClustering":46.54,"StackExchangeClusteringP2P":30.88,"TwentyNewsgroupsClustering":28.68} -{"index":72,"Rank":176,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.91,"ArxivClusteringP2P":35.84,"ArxivClusteringS2S":27.3,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":24.06,"MedrxivClusteringP2P":28.24,"MedrxivClusteringS2S":25.34,"RedditClustering":35.92,"RedditClusteringP2P":46.08,"StackExchangeClustering":45.31,"StackExchangeClusteringP2P":29.91,"TwentyNewsgroupsClustering":31.69} -{"index":173,"Rank":177,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.56,"ArxivClusteringP2P":34.17,"ArxivClusteringS2S":23.99,"BiorxivClusteringP2P":28.51,"BiorxivClusteringS2S":20.94,"MedrxivClusteringP2P":27.24,"MedrxivClusteringS2S":23.27,"RedditClustering":37.95,"RedditClusteringP2P":49.91,"StackExchangeClustering":46.35,"StackExchangeClusteringP2P":31.46,"TwentyNewsgroupsClustering":34.39} -{"index":285,"Rank":178,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.28,"ArxivClusteringP2P":35.27,"ArxivClusteringS2S":23.18,"BiorxivClusteringP2P":31.13,"BiorxivClusteringS2S":26.78,"MedrxivClusteringP2P":24.65,"MedrxivClusteringS2S":24.21,"RedditClustering":38.74,"RedditClusteringP2P":51.92,"StackExchangeClustering":42.7,"StackExchangeClusteringP2P":28.7,"TwentyNewsgroupsClustering":27.82} -{"index":30,"Rank":179,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.67,"ArxivClusteringP2P":33.7,"ArxivClusteringS2S":23.04,"BiorxivClusteringP2P":32.7,"BiorxivClusteringS2S":23.28,"MedrxivClusteringP2P":31.94,"MedrxivClusteringS2S":28.05,"RedditClustering":30.83,"RedditClusteringP2P":46.29,"StackExchangeClustering":39.44,"StackExchangeClusteringP2P":32.61,"TwentyNewsgroupsClustering":26.54} -{"index":81,"Rank":180,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.95,"ArxivClusteringP2P":34.74,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":28.93,"BiorxivClusteringS2S":23.08,"MedrxivClusteringP2P":28.3,"MedrxivClusteringS2S":24.93,"RedditClustering":33.76,"RedditClusteringP2P":41.01,"StackExchangeClustering":44.59,"StackExchangeClusteringP2P":28.23,"TwentyNewsgroupsClustering":28.24} -{"index":248,"Rank":181,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":30.61,"ArxivClusteringP2P":32.32,"ArxivClusteringS2S":25.5,"BiorxivClusteringP2P":28.99,"BiorxivClusteringS2S":23.2,"MedrxivClusteringP2P":29.44,"MedrxivClusteringS2S":26.16,"RedditClustering":31.25,"RedditClusteringP2P":43.3,"StackExchangeClustering":34.36,"StackExchangeClusteringP2P":30.64,"TwentyNewsgroupsClustering":31.58} -{"index":70,"Rank":182,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.21,"ArxivClusteringP2P":33.04,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":30.86,"BiorxivClusteringS2S":21.1,"MedrxivClusteringP2P":27.17,"MedrxivClusteringS2S":23.78,"RedditClustering":31.45,"RedditClusteringP2P":43.69,"StackExchangeClustering":39.16,"StackExchangeClusteringP2P":29.18,"TwentyNewsgroupsClustering":28.23} -{"index":122,"Rank":183,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":30.12,"ArxivClusteringP2P":35.19,"ArxivClusteringS2S":27.51,"BiorxivClusteringP2P":30.12,"BiorxivClusteringS2S":24.77,"MedrxivClusteringP2P":26.09,"MedrxivClusteringS2S":23.6,"RedditClustering":27.24,"RedditClusteringP2P":43.32,"StackExchangeClustering":43.58,"StackExchangeClusteringP2P":26.55,"TwentyNewsgroupsClustering":23.35} -{"index":227,"Rank":184,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":29.55,"ArxivClusteringP2P":32.13,"ArxivClusteringS2S":22.05,"BiorxivClusteringP2P":29.84,"BiorxivClusteringS2S":20.57,"MedrxivClusteringP2P":30.13,"MedrxivClusteringS2S":24.82,"RedditClustering":28.79,"RedditClusteringP2P":49.14,"StackExchangeClustering":35.43,"StackExchangeClusteringP2P":28.83,"TwentyNewsgroupsClustering":23.28} -{"index":218,"Rank":185,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.04,"ArxivClusteringP2P":32.61,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":24.9,"BiorxivClusteringS2S":19.55,"MedrxivClusteringP2P":23.6,"MedrxivClusteringS2S":21.97,"RedditClustering":32.18,"RedditClusteringP2P":45.14,"StackExchangeClustering":43.07,"StackExchangeClusteringP2P":28.5,"TwentyNewsgroupsClustering":23.21} -{"index":256,"Rank":186,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.04,"ArxivClusteringP2P":38.31,"ArxivClusteringS2S":27.56,"BiorxivClusteringP2P":33.35,"BiorxivClusteringS2S":24.18,"MedrxivClusteringP2P":28.35,"MedrxivClusteringS2S":23.71,"RedditClustering":22.47,"RedditClusteringP2P":39.66,"StackExchangeClustering":32.9,"StackExchangeClusteringP2P":26.0,"TwentyNewsgroupsClustering":22.9} -{"index":71,"Rank":187,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":28.01,"ArxivClusteringP2P":31.76,"ArxivClusteringS2S":21.06,"BiorxivClusteringP2P":29.84,"BiorxivClusteringS2S":18.34,"MedrxivClusteringP2P":27.42,"MedrxivClusteringS2S":22.41,"RedditClustering":26.71,"RedditClusteringP2P":44.14,"StackExchangeClustering":32.84,"StackExchangeClusteringP2P":27.48,"TwentyNewsgroupsClustering":26.09} -{"index":232,"Rank":188,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, 
fp32)":0.45,"Average":27.73,"ArxivClusteringP2P":32.56,"ArxivClusteringS2S":23.14,"BiorxivClusteringP2P":29.27,"BiorxivClusteringS2S":19.18,"MedrxivClusteringP2P":26.12,"MedrxivClusteringS2S":20.38,"RedditClustering":28.46,"RedditClusteringP2P":35.82,"StackExchangeClustering":35.8,"StackExchangeClusteringP2P":28.51,"TwentyNewsgroupsClustering":25.83} -{"index":233,"Rank":189,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":26.57,"ArxivClusteringP2P":34.73,"ArxivClusteringS2S":26.01,"BiorxivClusteringP2P":29.76,"BiorxivClusteringS2S":20.71,"MedrxivClusteringP2P":26.65,"MedrxivClusteringS2S":21.5,"RedditClustering":28.84,"RedditClusteringP2P":7.37,"StackExchangeClustering":39.04,"StackExchangeClusteringP2P":30.23,"TwentyNewsgroupsClustering":27.42} -{"index":141,"Rank":190,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":26.2,"ArxivClusteringP2P":26.81,"ArxivClusteringS2S":24.35,"BiorxivClusteringP2P":20.62,"BiorxivClusteringS2S":19.08,"MedrxivClusteringP2P":19.06,"MedrxivClusteringS2S":19.8,"RedditClustering":28.52,"RedditClusteringP2P":38.63,"StackExchangeClustering":46.33,"StackExchangeClusteringP2P":20.57,"TwentyNewsgroupsClustering":24.41} -{"index":255,"Rank":191,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.0,"ArxivClusteringP2P":14.79,"ArxivClusteringS2S":12.25,"BiorxivClusteringP2P":13.94,"BiorxivClusteringS2S":9.79,"MedrxivClusteringP2P":15.7,"MedrxivClusteringS2S":14.89,"RedditClustering":18.38,"RedditClusteringP2P":27.1,"StackExchangeClustering":23.66,"StackExchangeClusteringP2P":27.34,"TwentyNewsgroupsClustering":20.17} -{"index":11,"Rank":192,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":15.28,"ArxivClusteringP2P":17.77,"ArxivClusteringS2S":12.39,"BiorxivClusteringP2P":12.4,"BiorxivClusteringS2S":8.83,"MedrxivClusteringP2P":17.91,"MedrxivClusteringS2S":16.63,"RedditClustering":9.96,"RedditClusteringP2P":26.42,"StackExchangeClustering":15.79,"StackExchangeClusteringP2P":18.63,"TwentyNewsgroupsClustering":11.38} -{"index":263,"Rank":193,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":0.41,"ArxivClusteringP2P":0.44,"ArxivClusteringS2S":0.37,"BiorxivClusteringP2P":0.36,"BiorxivClusteringS2S":0.31,"MedrxivClusteringP2P":0.32,"MedrxivClusteringS2S":0.29,"RedditClustering":0.5,"RedditClusteringP2P":0.55,"StackExchangeClustering":0.58,"StackExchangeClusteringP2P":0.3,"TwentyNewsgroupsClustering":0.46} -{"index":37,"Rank":206,"Model":"openai_clip_embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":29.33,"BiorxivClusteringS2S":27.81,"MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"index":74,"Rank":223,"Model":"gte-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":41.14,"ArxivClusteringS2S":31.79,"BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"index":75,"Rank":224,"Model":"gte-micro-v3<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":41.14,"ArxivClusteringS2S":31.79,"BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"index":97,"Rank":234,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":30.15,"BiorxivClusteringP2P":"","BiorxivClusteringS2S":24.29,"MedrxivClusteringP2P":"","MedrxivClusteringS2S":23.01,"RedditClustering":21.29,"RedditClusteringP2P":"","StackExchangeClustering":35.55,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":18.35} -{"index":98,"Rank":235,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":31.38,"MedrxivClusteringS2S":30.28,"RedditClustering":"","RedditClusteringP2P":50.51,"StackExchangeClustering":51.79,"StackExchangeClusteringP2P":30.15,"TwentyNewsgroupsClustering":41.38} -{"index":124,"Rank":239,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","ArxivClusteringP2P":39.41,"ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"index":234,"Rank":267,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","ArxivClusteringP2P":33.59,"ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"index":272,"Rank":279,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":26.05,"MedrxivClusteringP2P":"","MedrxivClusteringS2S":25.67,"RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":44.92} -{"index":273,"Rank":280,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":32.92} -{"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":45.64,"RedditClusteringP2P":"","StackExchangeClustering":53.01,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":42.01} -{"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":40.79,"RedditClusteringP2P":"","StackExchangeClustering":55.14,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":37.64} -{"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":31.78,"RedditClusteringP2P":"","StackExchangeClustering":36.86,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":29.33} +{"Rank":1,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.67,"AlloProfClusteringP2P":64.83,"AlloProfClusteringS2S":53.52,"HALClusteringS2S":26.18,"MLSUMClusteringP2P (fr)":44.59,"MLSUMClusteringS2S (fr)":41.67,"MasakhaNEWSClusteringP2P (fra)":68.35,"MasakhaNEWSClusteringS2S (fra)":48.58} +{"Rank":2,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.57,"AlloProfClusteringP2P":65.37,"AlloProfClusteringS2S":47.03,"HALClusteringS2S":27.67,"MLSUMClusteringP2P (fr)":45.99,"MLSUMClusteringS2S (fr)":45.57,"MasakhaNEWSClusteringP2P (fra)":44.53,"MasakhaNEWSClusteringS2S (fra)":49.8} +{"Rank":3,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.46,"AlloProfClusteringP2P":61.63,"AlloProfClusteringS2S":50.67,"HALClusteringS2S":27.44,"MLSUMClusteringP2P (fr)":45.23,"MLSUMClusteringS2S (fr)":41.48,"MasakhaNEWSClusteringP2P (fra)":56.59,"MasakhaNEWSClusteringS2S (fra)":35.18} +{"Rank":4,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.74,"AlloProfClusteringP2P":62.01,"AlloProfClusteringS2S":49.2,"HALClusteringS2S":26.17,"MLSUMClusteringP2P (fr)":45.28,"MLSUMClusteringS2S (fr)":42.74,"MasakhaNEWSClusteringP2P (fra)":48.13,"MasakhaNEWSClusteringS2S (fra)":39.62} +{"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.23,"AlloProfClusteringP2P":62.5,"AlloProfClusteringS2S":44.28,"HALClusteringS2S":26.36,"MLSUMClusteringP2P (fr)":44.03,"MLSUMClusteringS2S (fr)":42.95,"MasakhaNEWSClusteringP2P (fra)":50.68,"MasakhaNEWSClusteringS2S (fra)":38.79} +{"Rank":6,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.25,"AlloProfClusteringP2P":60.98,"AlloProfClusteringS2S":43.5,"HALClusteringS2S":21.4,"MLSUMClusteringP2P (fr)":42.24,"MLSUMClusteringS2S (fr)":35.25,"MasakhaNEWSClusteringP2P (fra)":61.15,"MasakhaNEWSClusteringS2S (fra)":38.24} +{"Rank":7,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":43.17,"AlloProfClusteringP2P":62.69,"AlloProfClusteringS2S":42.06,"HALClusteringS2S":23.9,"MLSUMClusteringP2P (fr)":42.04,"MLSUMClusteringS2S (fr)":32.29,"MasakhaNEWSClusteringP2P (fra)":54.51,"MasakhaNEWSClusteringS2S (fra)":44.73} +{"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.66,"AlloProfClusteringP2P":62.09,"AlloProfClusteringS2S":32.98,"HALClusteringS2S":22.48,"MLSUMClusteringP2P 
(fr)":43.48,"MLSUMClusteringS2S (fr)":38.53,"MasakhaNEWSClusteringP2P (fra)":47.91,"MasakhaNEWSClusteringS2S (fra)":51.16} +{"Rank":9,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.0,"AlloProfClusteringP2P":56.9,"AlloProfClusteringS2S":37.84,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.9,"MLSUMClusteringS2S (fr)":35.5,"MasakhaNEWSClusteringP2P (fra)":60.57,"MasakhaNEWSClusteringS2S (fra)":40.31} +{"Rank":10,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.88,"AlloProfClusteringP2P":57.96,"AlloProfClusteringS2S":41.65,"HALClusteringS2S":24.84,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":38.77,"MasakhaNEWSClusteringP2P (fra)":48.54,"MasakhaNEWSClusteringS2S (fra)":36.33} +{"Rank":11,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":41.7,"AlloProfClusteringP2P":64.12,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":23.4,"MLSUMClusteringP2P (fr)":42.94,"MLSUMClusteringS2S (fr)":33.91,"MasakhaNEWSClusteringP2P (fra)":53.94,"MasakhaNEWSClusteringS2S (fra)":41.05} +{"Rank":12,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":41.6,"AlloProfClusteringP2P":60.37,"AlloProfClusteringS2S":40.76,"HALClusteringS2S":20.28,"MLSUMClusteringP2P (fr)":41.61,"MLSUMClusteringS2S (fr)":33.6,"MasakhaNEWSClusteringP2P (fra)":62.82,"MasakhaNEWSClusteringS2S (fra)":31.74} +{"Rank":13,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":41.16,"AlloProfClusteringP2P":61.06,"AlloProfClusteringS2S":28.12,"HALClusteringS2S":19.69,"MLSUMClusteringP2P (fr)":45.59,"MLSUMClusteringS2S (fr)":32.0,"MasakhaNEWSClusteringP2P (fra)":52.47,"MasakhaNEWSClusteringS2S (fra)":49.2} +{"Rank":14,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.7,"AlloProfClusteringP2P":63.53,"AlloProfClusteringS2S":36.18,"HALClusteringS2S":19.9,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":34.75,"MasakhaNEWSClusteringP2P (fra)":53.18,"MasakhaNEWSClusteringS2S (fra)":32.31} +{"Rank":15,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.6,"AlloProfClusteringP2P":61.82,"AlloProfClusteringS2S":39.78,"HALClusteringS2S":18.73,"MLSUMClusteringP2P (fr)":42.07,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":58.6,"MasakhaNEWSClusteringS2S (fra)":31.33} +{"Rank":16,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.43,"AlloProfClusteringP2P":60.66,"AlloProfClusteringS2S":35.05,"HALClusteringS2S":20.9,"MLSUMClusteringP2P (fr)":43.5,"MLSUMClusteringS2S (fr)":30.99,"MasakhaNEWSClusteringP2P (fra)":49.71,"MasakhaNEWSClusteringS2S (fra)":42.23} +{"Rank":17,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.07,"AlloProfClusteringP2P":58.44,"AlloProfClusteringS2S":35.93,"HALClusteringS2S":17.72,"MLSUMClusteringP2P (fr)":40.77,"MLSUMClusteringS2S (fr)":30.06,"MasakhaNEWSClusteringP2P (fra)":61.9,"MasakhaNEWSClusteringS2S (fra)":35.64} +{"Rank":18,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":39.11,"AlloProfClusteringP2P":54.49,"AlloProfClusteringS2S":44.79,"HALClusteringS2S":23.97,"MLSUMClusteringP2P (fr)":40.55,"MLSUMClusteringS2S (fr)":37.53,"MasakhaNEWSClusteringP2P (fra)":41.57,"MasakhaNEWSClusteringS2S (fra)":30.88} +{"Rank":19,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.93,"AlloProfClusteringP2P":60.89,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.2,"MLSUMClusteringS2S (fr)":37.61,"MasakhaNEWSClusteringP2P (fra)":40.12,"MasakhaNEWSClusteringS2S (fra)":39.22} +{"Rank":20,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.91,"AlloProfClusteringP2P":61.96,"AlloProfClusteringS2S":31.36,"HALClusteringS2S":17.31,"MLSUMClusteringP2P (fr)":42.8,"MLSUMClusteringS2S (fr)":32.72,"MasakhaNEWSClusteringP2P (fra)":56.81,"MasakhaNEWSClusteringS2S (fra)":29.41} +{"Rank":21,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":38.7,"AlloProfClusteringP2P":62.99,"AlloProfClusteringS2S":32.26,"HALClusteringS2S":22.44,"MLSUMClusteringP2P (fr)":44.04,"MLSUMClusteringS2S (fr)":37.65,"MasakhaNEWSClusteringP2P (fra)":40.94,"MasakhaNEWSClusteringS2S (fra)":30.56} +{"Rank":22,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":38.7,"AlloProfClusteringP2P":55.95,"AlloProfClusteringS2S":35.39,"HALClusteringS2S":18.2,"MLSUMClusteringP2P (fr)":40.17,"MLSUMClusteringS2S (fr)":34.65,"MasakhaNEWSClusteringP2P (fra)":53.76,"MasakhaNEWSClusteringS2S (fra)":32.76} +{"Rank":23,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.35,"AlloProfClusteringP2P":56.06,"AlloProfClusteringS2S":42.16,"HALClusteringS2S":23.21,"MLSUMClusteringP2P (fr)":39.97,"MLSUMClusteringS2S (fr)":36.55,"MasakhaNEWSClusteringP2P (fra)":36.58,"MasakhaNEWSClusteringS2S (fra)":33.9} +{"Rank":24,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":38.32,"AlloProfClusteringP2P":54.78,"AlloProfClusteringS2S":31.6,"HALClusteringS2S":20.62,"MLSUMClusteringP2P (fr)":42.09,"MLSUMClusteringS2S (fr)":34.84,"MasakhaNEWSClusteringP2P (fra)":46.16,"MasakhaNEWSClusteringS2S (fra)":38.13} +{"Rank":25,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.35,"AlloProfClusteringP2P":54.21,"AlloProfClusteringS2S":37.95,"HALClusteringS2S":18.94,"MLSUMClusteringP2P (fr)":41.02,"MLSUMClusteringS2S (fr)":37.97,"MasakhaNEWSClusteringP2P (fra)":24.09,"MasakhaNEWSClusteringS2S (fra)":40.24} +{"Rank":26,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":35.44,"AlloProfClusteringP2P":59.09,"AlloProfClusteringS2S":38.92,"HALClusteringS2S":20.22,"MLSUMClusteringP2P (fr)":35.98,"MLSUMClusteringS2S (fr)":27.05,"MasakhaNEWSClusteringP2P (fra)":36.03,"MasakhaNEWSClusteringS2S (fra)":30.77} +{"Rank":27,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.04,"AlloProfClusteringP2P":62.22,"AlloProfClusteringS2S":27.06,"HALClusteringS2S":13.86,"MLSUMClusteringP2P (fr)":44.11,"MLSUMClusteringS2S (fr)":30.47,"MasakhaNEWSClusteringP2P (fra)":40.2,"MasakhaNEWSClusteringS2S (fra)":27.35} 
+{"Rank":28,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":34.73,"AlloProfClusteringP2P":51.83,"AlloProfClusteringS2S":32.07,"HALClusteringS2S":18.84,"MLSUMClusteringP2P (fr)":36.74,"MLSUMClusteringS2S (fr)":28.12,"MasakhaNEWSClusteringP2P (fra)":34.92,"MasakhaNEWSClusteringS2S (fra)":40.58} +{"Rank":29,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":34.09,"AlloProfClusteringP2P":53.16,"AlloProfClusteringS2S":43.43,"HALClusteringS2S":20.26,"MLSUMClusteringP2P (fr)":41.22,"MLSUMClusteringS2S (fr)":31.88,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":30,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.96,"AlloProfClusteringP2P":53.49,"AlloProfClusteringS2S":43.1,"HALClusteringS2S":19.78,"MLSUMClusteringP2P (fr)":40.73,"MLSUMClusteringS2S (fr)":31.94,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":31,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.94,"AlloProfClusteringP2P":53.22,"AlloProfClusteringS2S":42.92,"HALClusteringS2S":19.94,"MLSUMClusteringP2P (fr)":40.96,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":33.82,"AlloProfClusteringP2P":51.5,"AlloProfClusteringS2S":43.06,"HALClusteringS2S":20.81,"MLSUMClusteringP2P (fr)":40.9,"MLSUMClusteringS2S (fr)":31.8,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":33,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":33.75,"AlloProfClusteringP2P":46.03,"AlloProfClusteringS2S":31.83,"HALClusteringS2S":19.58,"MLSUMClusteringP2P (fr)":34.35,"MLSUMClusteringS2S (fr)":29.3,"MasakhaNEWSClusteringP2P (fra)":42.72,"MasakhaNEWSClusteringS2S (fra)":32.47} +{"Rank":34,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.66,"AlloProfClusteringP2P":49.11,"AlloProfClusteringS2S":32.72,"HALClusteringS2S":16.19,"MLSUMClusteringP2P (fr)":36.19,"MLSUMClusteringS2S (fr)":30.39,"MasakhaNEWSClusteringP2P (fra)":38.51,"MasakhaNEWSClusteringS2S (fra)":32.51} +{"Rank":35,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":32.84,"AlloProfClusteringP2P":49.13,"AlloProfClusteringS2S":26.16,"HALClusteringS2S":12.49,"MLSUMClusteringP2P (fr)":35.15,"MLSUMClusteringS2S (fr)":25.95,"MasakhaNEWSClusteringP2P (fra)":53.73,"MasakhaNEWSClusteringS2S (fra)":27.27} +{"Rank":36,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":29.92,"AlloProfClusteringP2P":48.45,"AlloProfClusteringS2S":25.81,"HALClusteringS2S":11.52,"MLSUMClusteringP2P (fr)":34.53,"MLSUMClusteringS2S (fr)":27.35,"MasakhaNEWSClusteringP2P (fra)":32.04,"MasakhaNEWSClusteringS2S (fra)":29.77} +{"Rank":37,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.03,"AlloProfClusteringP2P":53.57,"AlloProfClusteringS2S":22.13,"HALClusteringS2S":7.68,"MLSUMClusteringP2P (fr)":36.43,"MLSUMClusteringS2S (fr)":25.26,"MasakhaNEWSClusteringP2P 
(fra)":37.57,"MasakhaNEWSClusteringS2S (fra)":20.58} +{"Rank":38,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":28.62,"AlloProfClusteringP2P":56.54,"AlloProfClusteringS2S":21.18,"HALClusteringS2S":5.94,"MLSUMClusteringP2P (fr)":42.67,"MLSUMClusteringS2S (fr)":18.5,"MasakhaNEWSClusteringP2P (fra)":34.02,"MasakhaNEWSClusteringS2S (fra)":21.52} +{"Rank":39,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":28.42,"AlloProfClusteringP2P":52.24,"AlloProfClusteringS2S":20.37,"HALClusteringS2S":8.68,"MLSUMClusteringP2P (fr)":40.44,"MLSUMClusteringS2S (fr)":24.14,"MasakhaNEWSClusteringP2P (fra)":29.29,"MasakhaNEWSClusteringS2S (fra)":23.76} +{"Rank":40,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":27.18,"AlloProfClusteringP2P":52.86,"AlloProfClusteringS2S":14.46,"HALClusteringS2S":3.85,"MLSUMClusteringP2P (fr)":39.06,"MLSUMClusteringS2S (fr)":17.13,"MasakhaNEWSClusteringP2P (fra)":41.61,"MasakhaNEWSClusteringS2S (fra)":21.26} +{"Rank":41,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.11,"AlloProfClusteringP2P":40.85,"AlloProfClusteringS2S":21.76,"HALClusteringS2S":5.26,"MLSUMClusteringP2P (fr)":38.09,"MLSUMClusteringS2S (fr)":18.71,"MasakhaNEWSClusteringP2P (fra)":26.43,"MasakhaNEWSClusteringS2S (fra)":24.68} +{"Rank":42,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":22.45,"AlloProfClusteringP2P":43.2,"AlloProfClusteringS2S":12.94,"HALClusteringS2S":1.8,"MLSUMClusteringP2P (fr)":33.22,"MLSUMClusteringS2S (fr)":14.9,"MasakhaNEWSClusteringP2P (fra)":28.49,"MasakhaNEWSClusteringS2S (fra)":22.58} +{"Rank":43,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":44,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":45,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":46,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":47,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S 
(fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":48,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":49,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} 
diff --git a/all_data_tasks/10/default.jsonl b/all_data_tasks/10/default.jsonl index c9c186940c2ef2cd2071d7e111f9214c1275c1ec..b17c2fe3485a482025108f54e4e5ba67d169f09f 100644 --- a/all_data_tasks/10/default.jsonl +++ b/all_data_tasks/10/default.jsonl @@ -1,66 +1,112 @@ -{"index":207,"Rank":1,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":91.87,"Cmnli":92.84,"Ocnli":90.9} -{"index":169,"Rank":2,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":91.55,"Cmnli":92.54,"Ocnli":90.56} -{"index":50,"Rank":3,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.87,"Cmnli":90.87,"Ocnli":90.87} -{"index":253,"Rank":4,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.24,"Cmnli":92.64,"Ocnli":87.84} -{"index":38,"Rank":5,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.88,"Cmnli":92.49,"Ocnli":87.26} -{"index":46,"Rank":6,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.92,"Cmnli":90.66,"Ocnli":87.18} -{"index":15,"Rank":7,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":88.52,"Cmnli":91.81,"Ocnli":85.22} -{"index":154,"Rank":8,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.1,"Cmnli":90.77,"Ocnli":85.44} -{"index":12,"Rank":9,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.1,"Cmnli":90.77,"Ocnli":85.44} -{"index":155,"Rank":10,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.08,"Cmnli":90.66,"Ocnli":85.51} -{"index":116,"Rank":11,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.08,"Cmnli":90.66,"Ocnli":85.51} -{"index":173,"Rank":12,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.93,"Cmnli":90.42,"Ocnli":85.43} -{"index":129,"Rank":13,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.84,"Cmnli":90.49,"Ocnli":85.18} -{"index":47,"Rank":14,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.57,"Cmnli":90.29,"Ocnli":84.85} -{"index":17,"Rank":15,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":87.48,"Cmnli":90.18,"Ocnli":84.78} -{"index":142,"Rank":16,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.48,"Cmnli":90.18,"Ocnli":84.78} -{"index":234,"Rank":17,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.48,"Cmnli":90.18,"Ocnli":84.78} -{"index":276,"Rank":18,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.08,"Cmnli":89.42,"Ocnli":84.74} -{"index":16,"Rank":19,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.91,"Cmnli":86.22,"Ocnli":87.6} 
-{"index":233,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.91,"Cmnli":86.22,"Ocnli":87.6} -{"index":29,"Rank":21,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.67,"Cmnli":90.13,"Ocnli":83.21} -{"index":206,"Rank":22,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.3,"Cmnli":89.95,"Ocnli":80.64} -{"index":286,"Rank":23,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.41,"Cmnli":89.5,"Ocnli":79.32} -{"index":105,"Rank":24,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.34,"Cmnli":89.36,"Ocnli":79.31} -{"index":199,"Rank":25,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.94,"Cmnli":85.27,"Ocnli":80.62} -{"index":280,"Rank":26,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.94,"Cmnli":85.27,"Ocnli":80.62} -{"index":175,"Rank":27,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.68,"Cmnli":86.85,"Ocnli":78.51} -{"index":20,"Rank":28,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.68,"Cmnli":86.85,"Ocnli":78.5} -{"index":21,"Rank":29,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.68,"Cmnli":86.85,"Ocnli":78.5} -{"index":40,"Rank":30,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.38,"Cmnli":87.3,"Ocnli":77.46} -{"index":41,"Rank":31,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.38,"Cmnli":87.55,"Ocnli":77.21} -{"index":315,"Rank":32,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.32,"Cmnli":85.31,"Ocnli":79.33} -{"index":27,"Rank":33,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":81.6,"Cmnli":85.27,"Ocnli":77.94} -{"index":48,"Rank":34,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.46,"Cmnli":85.92,"Ocnli":77.01} -{"index":49,"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.9,"Cmnli":86.7,"Ocnli":75.1} -{"index":284,"Rank":36,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.44,"Cmnli":85.6,"Ocnli":75.28} -{"index":51,"Rank":37,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.01,"Cmnli":84.76,"Ocnli":75.26} -{"index":172,"Rank":38,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.96,"Cmnli":84.75,"Ocnli":75.16} -{"index":23,"Rank":39,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":79.76,"Cmnli":84.1,"Ocnli":75.41} -{"index":39,"Rank":40,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.77,"Cmnli":84.1,"Ocnli":73.44} 
-{"index":174,"Rank":41,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.72,"Cmnli":85.14,"Ocnli":72.29} -{"index":191,"Rank":42,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.65,"Cmnli":85.14,"Ocnli":72.15} -{"index":252,"Rank":43,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.38,"Cmnli":83.83,"Ocnli":72.92} -{"index":26,"Rank":44,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":76.77,"Cmnli":82.17,"Ocnli":71.37} -{"index":251,"Rank":45,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.61,"Cmnli":82.33,"Ocnli":70.89} -{"index":189,"Rank":46,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.59,"Cmnli":83.32,"Ocnli":69.86} -{"index":171,"Rank":47,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.09,"Cmnli":81.65,"Ocnli":70.53} -{"index":188,"Rank":48,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.96,"Cmnli":81.34,"Ocnli":68.58} -{"index":208,"Rank":49,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.64,"Cmnli":79.58,"Ocnli":69.7} -{"index":190,"Rank":50,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.38,"Cmnli":78.43,"Ocnli":66.32} -{"index":180,"Rank":51,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":72.19,"Cmnli":80.21,"Ocnli":64.18} -{"index":53,"Rank":52,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.86,"Cmnli":77.67,"Ocnli":64.04} -{"index":33,"Rank":53,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":70.4,"Cmnli":76.24,"Ocnli":64.57} -{"index":288,"Rank":54,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.0,"Cmnli":76.46,"Ocnli":63.54} -{"index":184,"Rank":55,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.89,"Cmnli":78.18,"Ocnli":61.6} -{"index":311,"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.56,"Cmnli":76.03,"Ocnli":63.08} -{"index":274,"Rank":57,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.76,"Cmnli":75.67,"Ocnli":61.85} -{"index":277,"Rank":58,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":67.41,"Cmnli":73.87,"Ocnli":60.95} -{"index":183,"Rank":59,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.07,"Cmnli":74.51,"Ocnli":59.63} -{"index":279,"Rank":60,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":66.62,"Cmnli":72.55,"Ocnli":60.7} -{"index":186,"Rank":61,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":66.45,"Cmnli":72.12,"Ocnli":60.77} -{"index":221,"Rank":62,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":64.3,"Cmnli":69.27,"Ocnli":59.33} -{"index":220,"Rank":63,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":63.99,"Cmnli":69.98,"Ocnli":58.0} -{"index":226,"Rank":64,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.84,"Cmnli":65.68,"Ocnli":59.99} -{"index":159,"Rank":65,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.52,"Cmnli":61.86,"Ocnli":55.18} -{"index":289,"Rank":66,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.7,"Cmnli":57.78,"Ocnli":55.63} +{"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":88.52,"Cmnli":91.81,"Ocnli":85.22} +{"Rank":2,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.1,"Cmnli":90.77,"Ocnli":85.44} +{"Rank":3,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.32,"Cmnli":85.31,"Ocnli":79.33} +{"Rank":4,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":81.6,"Cmnli":85.27,"Ocnli":77.94} +{"Rank":5,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":79.76,"Cmnli":84.1,"Ocnli":75.41} +{"Rank":6,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":76.77,"Cmnli":82.17,"Ocnli":71.37} +{"Rank":7,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.86,"Cmnli":77.67,"Ocnli":64.04} +{"Rank":8,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":70.4,"Cmnli":76.24,"Ocnli":64.57} +{"Rank":9,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.89,"Cmnli":78.18,"Ocnli":61.6} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.56,"Cmnli":76.03,"Ocnli":63.08} +{"Rank":11,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":67.41,"Cmnli":73.87,"Ocnli":60.95} +{"Rank":12,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.07,"Cmnli":74.51,"Ocnli":59.63} +{"Rank":13,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":66.62,"Cmnli":72.55,"Ocnli":60.7} +{"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.45,"Cmnli":72.12,"Ocnli":60.77} +{"Rank":15,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":64.3,"Cmnli":69.27,"Ocnli":59.33} +{"Rank":16,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":63.99,"Cmnli":69.98,"Ocnli":58.0} +{"Rank":17,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, 
fp32)":4.47,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":18,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":19,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":20,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":21,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":22,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":23,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":24,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":25,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":26,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":27,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":28,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":29,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":30,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":31,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":32,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":33,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":34,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":35,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":36,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":37,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":38,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":39,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"Cmnli":null,"Ocnli":null} 
+{"Rank":40,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":41,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":42,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":43,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":44,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":45,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":46,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":47,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":48,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":49,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":50,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":51,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":52,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":53,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":54,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":55,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":56,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":57,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":58,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":59,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":60,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":61,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} 
+{"Rank":62,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":63,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":64,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":65,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":66,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":67,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":68,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":69,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":70,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":71,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":72,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":73,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":74,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":75,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":76,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":77,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":78,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":79,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":80,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":81,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":82,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":83,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":84,"Model":"komninos<\/a>","Model Size 
(Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":85,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":86,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":87,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":88,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":89,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":90,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":91,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":92,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":93,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":94,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":95,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":96,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":97,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":98,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":99,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":100,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":102,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":103,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":104,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":105,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":106,"Model":"text-similarity-babbage-001<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":107,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":108,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":109,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":110,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":111,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} diff --git a/all_data_tasks/11/default.jsonl b/all_data_tasks/11/default.jsonl index d77d78d1a6fe18226f912a2a798fcec5aac1440f..8918e6f04e47bac0390bfc43a02c2a9f68bbee52 100644 --- a/all_data_tasks/11/default.jsonl +++ b/all_data_tasks/11/default.jsonl @@ -1,79 +1,112 @@ -{"index":207,"Rank":1,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.58,"CMedQAv1":90.96,"CMedQAv2":90.41,"MMarcoReranking":39.91,"T2Reranking":69.03} -{"index":169,"Rank":2,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.34,"CMedQAv1":91.11,"CMedQAv2":90.07,"MMarcoReranking":38.87,"T2Reranking":69.29} -{"index":248,"Rank":3,"Model":"360Zhinao-1.8B-Reranking<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.13,"CMedQAv1":86.75,"CMedQAv2":87.92,"MMarcoReranking":37.29,"T2Reranking":68.55} -{"index":253,"Rank":4,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.0,"CMedQAv1":89.31,"CMedQAv2":90.14,"MMarcoReranking":33.39,"T2Reranking":67.15} -{"index":38,"Rank":5,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.78,"CMedQAv1":89.26,"CMedQAv2":90.05,"MMarcoReranking":32.74,"T2Reranking":67.05} -{"index":315,"Rank":6,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.67,"CMedQAv1":88.06,"CMedQAv2":88.46,"MMarcoReranking":34.3,"T2Reranking":67.85} -{"index":50,"Rank":7,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.3,"CMedQAv1":88.74,"CMedQAv2":89.42,"MMarcoReranking":31.61,"T2Reranking":67.45} -{"index":249,"Rank":8,"Model":"360Zhinao-search<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.92,"CMedQAv1":87.0,"CMedQAv2":88.48,"MMarcoReranking":32.41,"T2Reranking":67.8} -{"index":234,"Rank":9,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.92,"CMedQAv1":88.2,"CMedQAv2":88.03,"MMarcoReranking":31.65,"T2Reranking":67.8} -{"index":17,"Rank":10,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":68.92,"CMedQAv1":88.2,"CMedQAv2":88.03,"MMarcoReranking":31.65,"T2Reranking":67.8} 
-{"index":142,"Rank":11,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.92,"CMedQAv1":88.2,"CMedQAv2":88.03,"MMarcoReranking":31.65,"T2Reranking":67.8} -{"index":276,"Rank":12,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.72,"CMedQAv1":89.37,"CMedQAv2":89.27,"MMarcoReranking":29.64,"T2Reranking":66.61} -{"index":223,"Rank":13,"Model":"LdIR-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.58,"CMedQAv1":84.35,"CMedQAv2":86.45,"MMarcoReranking":35.64,"T2Reranking":67.86} -{"index":116,"Rank":14,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"CMedQAv1":89.33,"CMedQAv2":89.18,"MMarcoReranking":28.85,"T2Reranking":66.43} -{"index":155,"Rank":15,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"CMedQAv1":89.33,"CMedQAv2":89.18,"MMarcoReranking":28.85,"T2Reranking":66.43} -{"index":120,"Rank":16,"Model":"PEG<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.41,"CMedQAv1":84.09,"CMedQAv2":86.56,"MMarcoReranking":33.55,"T2Reranking":69.43} -{"index":12,"Rank":17,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.27,"CMedQAv1":88.99,"CMedQAv2":89.6,"MMarcoReranking":28.12,"T2Reranking":66.38} -{"index":154,"Rank":18,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.26,"CMedQAv1":88.99,"CMedQAv2":89.6,"MMarcoReranking":28.09,"T2Reranking":66.38} -{"index":233,"Rank":19,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.21,"CMedQAv1":88.16,"CMedQAv2":88.12,"MMarcoReranking":29.14,"T2Reranking":67.43} -{"index":16,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.21,"CMedQAv1":88.16,"CMedQAv2":88.12,"MMarcoReranking":29.14,"T2Reranking":67.43} -{"index":129,"Rank":21,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.98,"CMedQAv1":88.66,"CMedQAv2":88.9,"MMarcoReranking":27.76,"T2Reranking":66.62} -{"index":225,"Rank":22,"Model":"bge-reranker-large-1k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.87,"CMedQAv1":82.15,"CMedQAv2":84.19,"MMarcoReranking":37.64,"T2Reranking":67.48} -{"index":173,"Rank":23,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.84,"CMedQAv1":88.34,"CMedQAv2":89.06,"MMarcoReranking":27.48,"T2Reranking":66.49} -{"index":282,"Rank":24,"Model":"BAAI-bge-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.78,"CMedQAv1":82.14,"CMedQAv2":84.19,"MMarcoReranking":37.17,"T2Reranking":67.6} -{"index":105,"Rank":25,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.61,"CMedQAv1":86.52,"CMedQAv2":87.11,"MMarcoReranking":30.63,"T2Reranking":66.18} -{"index":106,"Rank":26,"Model":"alime-reranker-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":67.54,"CMedQAv1":82.32,"CMedQAv2":84.09,"MMarcoReranking":35.5,"T2Reranking":68.26} -{"index":286,"Rank":27,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.4,"CMedQAv1":86.09,"CMedQAv2":86.46,"MMarcoReranking":31.19,"T2Reranking":65.86} -{"index":206,"Rank":28,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.34,"CMedQAv1":87.12,"CMedQAv2":87.57,"MMarcoReranking":28.08,"T2Reranking":66.6} -{"index":46,"Rank":29,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.17,"CMedQAv1":88.49,"CMedQAv2":89.18,"MMarcoReranking":24.76,"T2Reranking":66.26} -{"index":31,"Rank":30,"Model":"bge-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"CMedQAv1":81.27,"CMedQAv2":84.1,"MMarcoReranking":35.46,"T2Reranking":67.28} -{"index":30,"Rank":31,"Model":"bge-reranker-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"CMedQAv1":81.27,"CMedQAv2":84.1,"MMarcoReranking":35.46,"T2Reranking":67.28} -{"index":144,"Rank":32,"Model":"bge-reranker-large-onnx<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"CMedQAv1":81.27,"CMedQAv2":84.1,"MMarcoReranking":35.46,"T2Reranking":67.28} -{"index":224,"Rank":33,"Model":"bge-reranker-base-1k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.02,"CMedQAv1":81.26,"CMedQAv2":84.11,"MMarcoReranking":35.46,"T2Reranking":67.25} -{"index":284,"Rank":34,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.0,"CMedQAv1":86.79,"CMedQAv2":87.2,"MMarcoReranking":27.64,"T2Reranking":66.36} -{"index":171,"Rank":35,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.95,"CMedQAv1":85.69,"CMedQAv2":86.46,"MMarcoReranking":29.2,"T2Reranking":66.46} -{"index":47,"Rank":36,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.92,"CMedQAv1":86.78,"CMedQAv2":87.39,"MMarcoReranking":27.39,"T2Reranking":66.11} -{"index":251,"Rank":37,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.68,"CMedQAv1":85.25,"CMedQAv2":86.15,"MMarcoReranking":28.87,"T2Reranking":66.46} -{"index":280,"Rank":38,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.57,"CMedQAv1":83.64,"CMedQAv2":83.74,"MMarcoReranking":31.54,"T2Reranking":67.37} -{"index":199,"Rank":39,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.57,"CMedQAv1":83.64,"CMedQAv2":83.74,"MMarcoReranking":31.54,"T2Reranking":67.37} -{"index":175,"Rank":40,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.41,"CMedQAv1":85.45,"CMedQAv2":85.83,"MMarcoReranking":27.97,"T2Reranking":66.41} -{"index":20,"Rank":41,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.39,"CMedQAv1":85.34,"CMedQAv2":85.87,"MMarcoReranking":27.96,"T2Reranking":66.38} -{"index":15,"Rank":42,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, 
fp32)":26.45,"Average":66.38,"CMedQAv1":86.37,"CMedQAv2":87.41,"MMarcoReranking":23.64,"T2Reranking":68.11} -{"index":21,"Rank":43,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.38,"CMedQAv1":85.34,"CMedQAv2":85.81,"MMarcoReranking":27.97,"T2Reranking":66.38} -{"index":288,"Rank":44,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.22,"CMedQAv1":86.08,"CMedQAv2":87.26,"MMarcoReranking":26.13,"T2Reranking":65.39} -{"index":172,"Rank":45,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.1,"CMedQAv1":84.7,"CMedQAv2":85.31,"MMarcoReranking":28.05,"T2Reranking":66.35} -{"index":125,"Rank":46,"Model":"cloudy-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.04,"CMedQAv1":86.1,"CMedQAv2":86.95,"MMarcoReranking":24.26,"T2Reranking":66.83} -{"index":298,"Rank":47,"Model":"hktv-fine-tuned-cloudy-large-zh-metaphor14<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.04,"CMedQAv1":86.1,"CMedQAv2":86.95,"MMarcoReranking":24.26,"T2Reranking":66.83} -{"index":174,"Rank":48,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.98,"CMedQAv1":84.69,"CMedQAv2":85.23,"MMarcoReranking":27.16,"T2Reranking":66.86} -{"index":252,"Rank":49,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.98,"CMedQAv1":84.79,"CMedQAv2":84.89,"MMarcoReranking":27.27,"T2Reranking":66.96} -{"index":51,"Rank":50,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.89,"CMedQAv1":84.67,"CMedQAv2":85.27,"MMarcoReranking":27.84,"T2Reranking":65.79} -{"index":27,"Rank":51,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":65.84,"CMedQAv1":83.45,"CMedQAv2":85.44,"MMarcoReranking":28.74,"T2Reranking":65.74} -{"index":23,"Rank":52,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":65.4,"CMedQAv1":80.47,"CMedQAv2":84.88,"MMarcoReranking":29.74,"T2Reranking":66.49} -{"index":26,"Rank":53,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":64.9,"CMedQAv1":81.72,"CMedQAv2":84.64,"MMarcoReranking":27.1,"T2Reranking":66.16} -{"index":208,"Rank":54,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.73,"CMedQAv1":83.85,"CMedQAv2":84.75,"MMarcoReranking":20.56,"T2Reranking":65.75} -{"index":40,"Rank":55,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.2,"CMedQAv1":78.39,"CMedQAv2":80.84,"MMarcoReranking":27.29,"T2Reranking":66.3} -{"index":41,"Rank":56,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.82,"CMedQAv1":77.42,"CMedQAv2":77.84,"MMarcoReranking":30.17,"T2Reranking":65.85} -{"index":180,"Rank":57,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":61.86,"CMedQAv1":77.68,"CMedQAv2":78.66,"MMarcoReranking":24.21,"T2Reranking":66.9} -{"index":274,"Rank":58,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":61.48,"CMedQAv1":80.4,"CMedQAv2":80.39,"MMarcoReranking":18.86,"T2Reranking":66.25} -{"index":39,"Rank":59,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.35,"CMedQAv1":76.47,"CMedQAv2":77.44,"MMarcoReranking":24.31,"T2Reranking":67.18} -{"index":226,"Rank":60,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.96,"CMedQAv1":80.66,"CMedQAv2":79.31,"MMarcoReranking":19.57,"T2Reranking":64.31} -{"index":33,"Rank":61,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":60.92,"CMedQAv1":77.4,"CMedQAv2":79.86,"MMarcoReranking":20.5,"T2Reranking":65.9} -{"index":221,"Rank":62,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":59.66,"CMedQAv1":77.76,"CMedQAv2":78.27,"MMarcoReranking":16.46,"T2Reranking":66.13} -{"index":220,"Rank":63,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":59.34,"CMedQAv1":77.05,"CMedQAv2":76.76,"MMarcoReranking":17.51,"T2Reranking":66.03} -{"index":191,"Rank":64,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.11,"CMedQAv1":70.54,"CMedQAv2":71.35,"MMarcoReranking":26.69,"T2Reranking":67.88} -{"index":189,"Rank":65,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.68,"CMedQAv1":68.91,"CMedQAv2":69.42,"MMarcoReranking":25.9,"T2Reranking":66.49} -{"index":188,"Rank":66,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.27,"CMedQAv1":66.06,"CMedQAv2":66.96,"MMarcoReranking":26.14,"T2Reranking":65.94} -{"index":184,"Rank":67,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":56.0,"CMedQAv1":68.25,"CMedQAv2":68.56,"MMarcoReranking":21.34,"T2Reranking":65.83} -{"index":183,"Rank":68,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.35,"CMedQAv1":65.21,"CMedQAv2":66.06,"MMarcoReranking":21.76,"T2Reranking":64.39} -{"index":311,"Rank":69,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.28,"CMedQAv1":63.08,"CMedQAv2":64.02,"MMarcoReranking":23.39,"T2Reranking":66.65} -{"index":186,"Rank":70,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.86,"CMedQAv1":63.44,"CMedQAv2":62.41,"MMarcoReranking":24.33,"T2Reranking":65.24} -{"index":190,"Rank":71,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.62,"CMedQAv1":63.41,"CMedQAv2":63.66,"MMarcoReranking":23.69,"T2Reranking":63.74} -{"index":277,"Rank":72,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":49.45,"CMedQAv1":59.26,"CMedQAv2":59.82,"MMarcoReranking":12.76,"T2Reranking":65.95} -{"index":279,"Rank":73,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":49.25,"CMedQAv1":57.82,"CMedQAv2":58.88,"MMarcoReranking":14.55,"T2Reranking":65.76} -{"index":53,"Rank":74,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, 
fp32)":1.21,"Average":49.16,"CMedQAv1":58.92,"CMedQAv2":60.41,"MMarcoReranking":12.48,"T2Reranking":64.82} -{"index":48,"Rank":75,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.08,"CMedQAv1":53.07,"CMedQAv2":52.84,"MMarcoReranking":9.29,"T2Reranking":65.14} -{"index":49,"Rank":76,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.91,"CMedQAv1":50.19,"CMedQAv2":50.39,"MMarcoReranking":14.57,"T2Reranking":64.49} -{"index":159,"Rank":77,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.84,"CMedQAv1":34.31,"CMedQAv2":35.88,"MMarcoReranking":5.83,"T2Reranking":55.35} -{"index":289,"Rank":78,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.86,"CMedQAv1":19.72,"CMedQAv2":22.48,"MMarcoReranking":1.17,"T2Reranking":60.05} -{"index":29,"Rank":99,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CMedQAv1":"","CMedQAv2":"","MMarcoReranking":35.43,"T2Reranking":67.48} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":53.24,"CMedQAv1":68.25,"CMedQAv2":68.56,"MMarcoReranking":21.34,"MMarcoReranking (cmn-Hans)":29.12,"T2Reranking":65.83,"T2Reranking (cmn-Hans)":66.32} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":52.13,"CMedQAv1":65.21,"CMedQAv2":66.06,"MMarcoReranking":21.76,"MMarcoReranking (cmn-Hans)":30.52,"T2Reranking":64.39,"T2Reranking (cmn-Hans)":64.86} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.85,"CMedQAv1":63.44,"CMedQAv2":62.41,"MMarcoReranking":24.33,"MMarcoReranking (cmn-Hans)":29.98,"T2Reranking":65.24,"T2Reranking (cmn-Hans)":65.72} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":88.99,"CMedQAv2":89.6,"MMarcoReranking":28.12,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.38,"T2Reranking (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"CMedQAv1":86.37,"CMedQAv2":87.41,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":68.11,"T2Reranking (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":80.47,"CMedQAv2":84.88,"MMarcoReranking":29.74,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.49,"T2Reranking (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CMedQAv1":81.72,"CMedQAv2":84.64,"MMarcoReranking":27.1,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.16,"T2Reranking (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CMedQAv1":83.45,"CMedQAv2":85.44,"MMarcoReranking":28.74,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.74,"T2Reranking (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, 
fp32)":0.09,"Average":null,"CMedQAv1":77.4,"CMedQAv2":79.86,"MMarcoReranking":20.5,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.9,"T2Reranking (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CMedQAv1":58.92,"CMedQAv2":60.41,"MMarcoReranking":12.48,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":64.82,"T2Reranking (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":21.7,"T2Reranking":null,"T2Reranking (cmn-Hans)":65.63} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size 
(Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} 
+{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":77.05,"CMedQAv2":76.76,"MMarcoReranking":17.51,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.03,"T2Reranking (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":77.76,"CMedQAv2":78.27,"MMarcoReranking":16.46,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.13,"T2Reranking (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million 
Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":14.83,"T2Reranking":null,"T2Reranking (cmn-Hans)":63.29} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":5.27,"T2Reranking":null,"T2Reranking (cmn-Hans)":60.32} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":4.74,"T2Reranking":null,"T2Reranking (cmn-Hans)":56.26} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":4.65,"T2Reranking":null,"T2Reranking (cmn-Hans)":58.3} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":16.14,"T2Reranking":null,"T2Reranking (cmn-Hans)":65.28} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":14.57,"T2Reranking":null,"T2Reranking (cmn-Hans)":64.49} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":59.26,"CMedQAv2":59.82,"MMarcoReranking":12.76,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.95,"T2Reranking (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"CMedQAv1":57.82,"CMedQAv2":58.88,"MMarcoReranking":14.55,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.76,"T2Reranking (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":63.08,"CMedQAv2":64.02,"MMarcoReranking":23.39,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.65,"T2Reranking (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":88.06,"CMedQAv2":88.46,"MMarcoReranking":34.3,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":67.85,"T2Reranking (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} diff --git a/all_data_tasks/12/default.jsonl b/all_data_tasks/12/default.jsonl index 
12dbacbb4e82529591b5a8bc82e4acffbec9ad1c..2b0949510c80f07225fb5e0ef24d0335df71c801 100644 --- a/all_data_tasks/12/default.jsonl +++ b/all_data_tasks/12/default.jsonl @@ -1,72 +1,112 @@ -{"index":72,"Rank":1,"Model":"Zhihui_LLM_Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.74,"CmedqaRetrieval":48.69,"CovidRetrieval":84.39,"DuRetrieval":91.34,"EcomRetrieval":71.96,"MedicalRetrieval":65.19,"MMarcoRetrieval":84.77,"T2Retrieval":88.3,"VideoRetrieval":79.31} -{"index":207,"Rank":2,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.5,"CmedqaRetrieval":47.14,"CovidRetrieval":89.4,"DuRetrieval":89.44,"EcomRetrieval":70.5,"MedicalRetrieval":68.19,"MMarcoRetrieval":82.19,"T2Retrieval":85.01,"VideoRetrieval":80.09} -{"index":169,"Rank":3,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.36,"CmedqaRetrieval":47.16,"CovidRetrieval":89.14,"DuRetrieval":89.23,"EcomRetrieval":70.74,"MedicalRetrieval":68.14,"MMarcoRetrieval":82.38,"T2Retrieval":83.81,"VideoRetrieval":80.26} -{"index":234,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.03,"CmedqaRetrieval":48.69,"CovidRetrieval":83.65,"DuRetrieval":87.44,"EcomRetrieval":71.15,"MedicalRetrieval":65.59,"MMarcoRetrieval":85.16,"T2Retrieval":87.73,"VideoRetrieval":78.84} -{"index":142,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.03,"CmedqaRetrieval":48.69,"CovidRetrieval":83.65,"DuRetrieval":87.44,"EcomRetrieval":71.15,"MedicalRetrieval":65.59,"MMarcoRetrieval":85.16,"T2Retrieval":87.73,"VideoRetrieval":78.84} -{"index":17,"Rank":6,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":76.03,"CmedqaRetrieval":48.69,"CovidRetrieval":83.65,"DuRetrieval":87.44,"EcomRetrieval":71.15,"MedicalRetrieval":65.59,"MMarcoRetrieval":85.16,"T2Retrieval":87.73,"VideoRetrieval":78.84} -{"index":249,"Rank":7,"Model":"360Zhinao-search<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.06,"CmedqaRetrieval":46.73,"CovidRetrieval":85.02,"DuRetrieval":87.57,"EcomRetrieval":68.9,"MedicalRetrieval":63.69,"MMarcoRetrieval":83.32,"T2Retrieval":87.12,"VideoRetrieval":78.09} -{"index":276,"Rank":8,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.97,"CmedqaRetrieval":47.26,"CovidRetrieval":83.66,"DuRetrieval":89.28,"EcomRetrieval":69.28,"MedicalRetrieval":65.94,"MMarcoRetrieval":80.65,"T2Retrieval":86.88,"VideoRetrieval":76.79} -{"index":12,"Rank":9,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.41,"CmedqaRetrieval":46.56,"CovidRetrieval":84.03,"DuRetrieval":87.85,"EcomRetrieval":68.79,"MedicalRetrieval":65.92,"MMarcoRetrieval":79.93,"T2Retrieval":86.76,"VideoRetrieval":75.43} -{"index":38,"Rank":10,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.41,"CmedqaRetrieval":47.64,"CovidRetrieval":85.95,"DuRetrieval":89.11,"EcomRetrieval":67.92,"MedicalRetrieval":65.2,"MMarcoRetrieval":79.81,"T2Retrieval":86.24,"VideoRetrieval":73.39} -{"index":253,"Rank":11,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":74.36,"CmedqaRetrieval":47.58,"CovidRetrieval":86.78,"DuRetrieval":89.14,"EcomRetrieval":67.75,"MedicalRetrieval":64.88,"MMarcoRetrieval":79.54,"T2Retrieval":86.14,"VideoRetrieval":73.1} -{"index":120,"Rank":12,"Model":"PEG<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.78,"CmedqaRetrieval":44.42,"CovidRetrieval":82.56,"DuRetrieval":87.67,"EcomRetrieval":67.32,"MedicalRetrieval":60.99,"MMarcoRetrieval":82.63,"T2Retrieval":87.0,"VideoRetrieval":77.64} -{"index":29,"Rank":13,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.73,"CmedqaRetrieval":42.21,"CovidRetrieval":77.46,"DuRetrieval":90.46,"EcomRetrieval":69.3,"MedicalRetrieval":62.02,"MMarcoRetrieval":84.7,"T2Retrieval":86.26,"VideoRetrieval":77.4} -{"index":154,"Rank":14,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.6,"CmedqaRetrieval":46.87,"CovidRetrieval":82.44,"DuRetrieval":87.13,"EcomRetrieval":68.62,"MedicalRetrieval":65.18,"MMarcoRetrieval":79.14,"T2Retrieval":85.56,"VideoRetrieval":73.89} -{"index":50,"Rank":15,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.56,"CmedqaRetrieval":46.68,"CovidRetrieval":85.24,"DuRetrieval":88.26,"EcomRetrieval":67.23,"MedicalRetrieval":64.29,"MMarcoRetrieval":78.64,"T2Retrieval":85.29,"VideoRetrieval":72.89} -{"index":155,"Rank":16,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.52,"CmedqaRetrieval":46.61,"CovidRetrieval":81.87,"DuRetrieval":86.8,"EcomRetrieval":68.1,"MedicalRetrieval":64.99,"MMarcoRetrieval":79.21,"T2Retrieval":85.85,"VideoRetrieval":74.71} -{"index":116,"Rank":17,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.52,"CmedqaRetrieval":46.61,"CovidRetrieval":81.87,"DuRetrieval":86.8,"EcomRetrieval":68.1,"MedicalRetrieval":64.99,"MMarcoRetrieval":79.21,"T2Retrieval":85.85,"VideoRetrieval":74.71} -{"index":206,"Rank":18,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.41,"CmedqaRetrieval":44.22,"CovidRetrieval":87.69,"DuRetrieval":86.45,"EcomRetrieval":68.04,"MedicalRetrieval":63.38,"MMarcoRetrieval":78.47,"T2Retrieval":85.45,"VideoRetrieval":73.59} -{"index":105,"Rank":19,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.3,"CmedqaRetrieval":43.93,"CovidRetrieval":88.14,"DuRetrieval":86.23,"EcomRetrieval":67.56,"MedicalRetrieval":63.57,"MMarcoRetrieval":78.25,"T2Retrieval":84.58,"VideoRetrieval":74.16} -{"index":315,"Rank":20,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.12,"CmedqaRetrieval":47.64,"CovidRetrieval":86.86,"DuRetrieval":88.43,"EcomRetrieval":66.39,"MedicalRetrieval":61.1,"MMarcoRetrieval":80.17,"T2Retrieval":80.11,"VideoRetrieval":74.28} -{"index":129,"Rank":21,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.93,"CmedqaRetrieval":46.78,"CovidRetrieval":81.56,"DuRetrieval":86.55,"EcomRetrieval":67.6,"MedicalRetrieval":64.1,"MMarcoRetrieval":78.42,"T2Retrieval":85.08,"VideoRetrieval":73.32} -{"index":298,"Rank":22,"Model":"hktv-fine-tuned-cloudy-large-zh-metaphor14<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.92,"CmedqaRetrieval":43.58,"CovidRetrieval":85.83,"DuRetrieval":86.2,"EcomRetrieval":66.99,"MedicalRetrieval":64.23,"MMarcoRetrieval":77.69,"T2Retrieval":84.94,"VideoRetrieval":73.86} -{"index":125,"Rank":23,"Model":"cloudy-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.92,"CmedqaRetrieval":43.58,"CovidRetrieval":85.83,"DuRetrieval":86.2,"EcomRetrieval":66.99,"MedicalRetrieval":64.23,"MMarcoRetrieval":77.69,"T2Retrieval":84.94,"VideoRetrieval":73.86} -{"index":286,"Rank":24,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.49,"CmedqaRetrieval":43.15,"CovidRetrieval":88.41,"DuRetrieval":85.04,"EcomRetrieval":67.25,"MedicalRetrieval":62.88,"MMarcoRetrieval":77.53,"T2Retrieval":81.93,"VideoRetrieval":73.7} -{"index":173,"Rank":25,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.28,"CmedqaRetrieval":45.38,"CovidRetrieval":80.61,"DuRetrieval":85.55,"EcomRetrieval":67.88,"MedicalRetrieval":64.34,"MMarcoRetrieval":77.42,"T2Retrieval":84.5,"VideoRetrieval":72.56} -{"index":233,"Rank":26,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.86,"CmedqaRetrieval":46.97,"CovidRetrieval":80.79,"DuRetrieval":89.4,"EcomRetrieval":62.51,"MedicalRetrieval":58.65,"MMarcoRetrieval":83.01,"T2Retrieval":85.47,"VideoRetrieval":68.11} -{"index":16,"Rank":27,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.86,"CmedqaRetrieval":46.97,"CovidRetrieval":80.79,"DuRetrieval":89.4,"EcomRetrieval":62.51,"MedicalRetrieval":58.65,"MMarcoRetrieval":83.01,"T2Retrieval":85.47,"VideoRetrieval":68.11} -{"index":21,"Rank":28,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.85,"CmedqaRetrieval":42.73,"CovidRetrieval":82.19,"DuRetrieval":88.02,"EcomRetrieval":64.71,"MedicalRetrieval":60.63,"MMarcoRetrieval":79.56,"T2Retrieval":84.69,"VideoRetrieval":72.25} -{"index":284,"Rank":29,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.71,"CmedqaRetrieval":44.38,"CovidRetrieval":75.74,"DuRetrieval":83.64,"EcomRetrieval":69.56,"MedicalRetrieval":64.94,"MMarcoRetrieval":77.7,"T2Retrieval":83.08,"VideoRetrieval":74.67} -{"index":251,"Rank":30,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.2,"CmedqaRetrieval":42.02,"CovidRetrieval":84.5,"DuRetrieval":88.25,"EcomRetrieval":62.88,"MedicalRetrieval":58.09,"MMarcoRetrieval":77.92,"T2Retrieval":82.38,"VideoRetrieval":73.54} -{"index":171,"Rank":31,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.07,"CmedqaRetrieval":41.86,"CovidRetrieval":84.71,"DuRetrieval":87.8,"EcomRetrieval":62.46,"MedicalRetrieval":57.97,"MMarcoRetrieval":78.22,"T2Retrieval":82.51,"VideoRetrieval":73.04} -{"index":174,"Rank":32,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.02,"CmedqaRetrieval":42.28,"CovidRetrieval":85.19,"DuRetrieval":87.81,"EcomRetrieval":61.66,"MedicalRetrieval":59.38,"MMarcoRetrieval":78.1,"T2Retrieval":82.76,"VideoRetrieval":70.96} -{"index":252,"Rank":33,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":70.93,"CmedqaRetrieval":41.98,"CovidRetrieval":85.04,"DuRetrieval":87.97,"EcomRetrieval":61.91,"MedicalRetrieval":59.04,"MMarcoRetrieval":77.83,"T2Retrieval":82.47,"VideoRetrieval":71.18} -{"index":245,"Rank":34,"Model":"checkpoint-9000<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.63,"CmedqaRetrieval":40.57,"CovidRetrieval":75.93,"DuRetrieval":83.73,"EcomRetrieval":66.14,"MedicalRetrieval":58.58,"MMarcoRetrieval":81.95,"T2Retrieval":84.17,"VideoRetrieval":73.94} -{"index":96,"Rank":35,"Model":"qwen-1.8b-retrieval-test<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.63,"CmedqaRetrieval":40.57,"CovidRetrieval":75.93,"DuRetrieval":83.73,"EcomRetrieval":66.14,"MedicalRetrieval":58.58,"MMarcoRetrieval":81.95,"T2Retrieval":84.17,"VideoRetrieval":73.94} -{"index":15,"Rank":36,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":70.62,"CmedqaRetrieval":43.47,"CovidRetrieval":80.87,"DuRetrieval":86.01,"EcomRetrieval":66.46,"MedicalRetrieval":61.33,"MMarcoRetrieval":73.83,"T2Retrieval":83.58,"VideoRetrieval":69.41} -{"index":26,"Rank":37,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.54,"CmedqaRetrieval":41.03,"CovidRetrieval":75.07,"DuRetrieval":84.68,"EcomRetrieval":65.6,"MedicalRetrieval":58.28,"MMarcoRetrieval":81.38,"T2Retrieval":84.39,"VideoRetrieval":73.93} -{"index":27,"Rank":38,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.46,"CmedqaRetrieval":42.57,"CovidRetrieval":73.35,"DuRetrieval":86.32,"EcomRetrieval":65.33,"MedicalRetrieval":59.59,"MMarcoRetrieval":79.23,"T2Retrieval":83.99,"VideoRetrieval":73.32} -{"index":46,"Rank":39,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.41,"CmedqaRetrieval":45.83,"CovidRetrieval":76.77,"DuRetrieval":81.47,"EcomRetrieval":68.42,"MedicalRetrieval":64.42,"MMarcoRetrieval":71.7,"T2Retrieval":81.33,"VideoRetrieval":73.31} -{"index":20,"Rank":40,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.26,"CmedqaRetrieval":42.68,"CovidRetrieval":80.79,"DuRetrieval":85.74,"EcomRetrieval":62.06,"MedicalRetrieval":59.53,"MMarcoRetrieval":78.66,"T2Retrieval":84.69,"VideoRetrieval":67.96} -{"index":175,"Rank":41,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.14,"CmedqaRetrieval":42.63,"CovidRetrieval":80.81,"DuRetrieval":85.75,"EcomRetrieval":62.07,"MedicalRetrieval":59.42,"MMarcoRetrieval":78.67,"T2Retrieval":83.87,"VideoRetrieval":67.93} -{"index":172,"Rank":42,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.08,"CmedqaRetrieval":42.03,"CovidRetrieval":79.86,"DuRetrieval":86.97,"EcomRetrieval":62.85,"MedicalRetrieval":58.5,"MMarcoRetrieval":78.36,"T2Retrieval":83.76,"VideoRetrieval":68.27} -{"index":51,"Rank":43,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.59,"CmedqaRetrieval":41.8,"CovidRetrieval":79.3,"DuRetrieval":86.43,"EcomRetrieval":62.64,"MedicalRetrieval":58.27,"MMarcoRetrieval":78.17,"T2Retrieval":82.11,"VideoRetrieval":67.96} -{"index":23,"Rank":44,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, 
fp32)":0.38,"Average":69.49,"CmedqaRetrieval":41.61,"CovidRetrieval":74.7,"DuRetrieval":85.07,"EcomRetrieval":64.25,"MedicalRetrieval":56.51,"MMarcoRetrieval":77.69,"T2Retrieval":83.71,"VideoRetrieval":72.35} -{"index":199,"Rank":45,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.4,"CmedqaRetrieval":39.15,"CovidRetrieval":81.22,"DuRetrieval":84.57,"EcomRetrieval":63.95,"MedicalRetrieval":57.12,"MMarcoRetrieval":77.96,"T2Retrieval":80.59,"VideoRetrieval":70.62} -{"index":280,"Rank":46,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.4,"CmedqaRetrieval":39.15,"CovidRetrieval":81.22,"DuRetrieval":84.57,"EcomRetrieval":63.95,"MedicalRetrieval":57.12,"MMarcoRetrieval":77.96,"T2Retrieval":80.59,"VideoRetrieval":70.62} -{"index":47,"Rank":47,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.7,"CmedqaRetrieval":42.69,"CovidRetrieval":75.63,"DuRetrieval":79.89,"EcomRetrieval":64.48,"MedicalRetrieval":59.95,"MMarcoRetrieval":70.75,"T2Retrieval":79.9,"VideoRetrieval":68.3} -{"index":288,"Rank":48,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.5,"CmedqaRetrieval":42.46,"CovidRetrieval":61.65,"DuRetrieval":78.57,"EcomRetrieval":57.91,"MedicalRetrieval":62.64,"MMarcoRetrieval":75.0,"T2Retrieval":78.12,"VideoRetrieval":67.69} -{"index":40,"Rank":49,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.11,"CmedqaRetrieval":34.08,"CovidRetrieval":71.92,"DuRetrieval":79.7,"EcomRetrieval":59.22,"MedicalRetrieval":51.5,"MMarcoRetrieval":74.73,"T2Retrieval":77.08,"VideoRetrieval":64.67} -{"index":184,"Rank":50,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.66,"CmedqaRetrieval":28.67,"CovidRetrieval":75.51,"DuRetrieval":85.32,"EcomRetrieval":54.75,"MedicalRetrieval":51.44,"MMarcoRetrieval":79.2,"T2Retrieval":76.11,"VideoRetrieval":58.25} -{"index":41,"Rank":51,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.52,"CmedqaRetrieval":33.45,"CovidRetrieval":73.36,"DuRetrieval":80.13,"EcomRetrieval":55.89,"MedicalRetrieval":50.67,"MMarcoRetrieval":74.14,"T2Retrieval":76.84,"VideoRetrieval":63.66} -{"index":191,"Rank":52,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.87,"CmedqaRetrieval":27.0,"CovidRetrieval":68.86,"DuRetrieval":78.08,"EcomRetrieval":58.34,"MedicalRetrieval":49.63,"MMarcoRetrieval":78.82,"T2Retrieval":79.82,"VideoRetrieval":62.44} -{"index":33,"Rank":53,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":61.77,"CmedqaRetrieval":35.11,"CovidRetrieval":70.14,"DuRetrieval":77.28,"EcomRetrieval":55.71,"MedicalRetrieval":49.8,"MMarcoRetrieval":63.48,"T2Retrieval":76.43,"VideoRetrieval":66.19} -{"index":180,"Rank":54,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":61.75,"CmedqaRetrieval":34.22,"CovidRetrieval":73.13,"DuRetrieval":87.02,"EcomRetrieval":45.96,"MedicalRetrieval":52.75,"MMarcoRetrieval":74.83,"T2Retrieval":80.68,"VideoRetrieval":45.39} -{"index":183,"Rank":55,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":61.63,"CmedqaRetrieval":27.2,"CovidRetrieval":73.45,"DuRetrieval":81.64,"EcomRetrieval":54.17,"MedicalRetrieval":48.35,"MMarcoRetrieval":76.04,"T2Retrieval":70.86,"VideoRetrieval":61.3} -{"index":39,"Rank":56,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.6,"CmedqaRetrieval":31.79,"CovidRetrieval":66.33,"DuRetrieval":75.25,"EcomRetrieval":57.32,"MedicalRetrieval":49.28,"MMarcoRetrieval":69.11,"T2Retrieval":75.94,"VideoRetrieval":59.76} -{"index":186,"Rank":57,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.95,"CmedqaRetrieval":24.38,"CovidRetrieval":72.82,"DuRetrieval":81.35,"EcomRetrieval":53.56,"MedicalRetrieval":44.84,"MMarcoRetrieval":73.17,"T2Retrieval":71.39,"VideoRetrieval":58.09} -{"index":189,"Rank":58,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.77,"CmedqaRetrieval":26.18,"CovidRetrieval":67.57,"DuRetrieval":75.82,"EcomRetrieval":51.28,"MedicalRetrieval":47.48,"MMarcoRetrieval":77.0,"T2Retrieval":76.3,"VideoRetrieval":56.54} -{"index":226,"Rank":59,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.76,"CmedqaRetrieval":32.33,"CovidRetrieval":67.22,"DuRetrieval":75.27,"EcomRetrieval":58.24,"MedicalRetrieval":54.78,"MMarcoRetrieval":57.91,"T2Retrieval":69.22,"VideoRetrieval":63.09} -{"index":274,"Rank":60,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.2,"CmedqaRetrieval":30.34,"CovidRetrieval":58.55,"DuRetrieval":75.05,"EcomRetrieval":52.54,"MedicalRetrieval":50.08,"MMarcoRetrieval":62.26,"T2Retrieval":72.7,"VideoRetrieval":64.08} -{"index":188,"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.96,"CmedqaRetrieval":23.91,"CovidRetrieval":68.14,"DuRetrieval":72.84,"EcomRetrieval":52.02,"MedicalRetrieval":43.27,"MMarcoRetrieval":74.66,"T2Retrieval":72.81,"VideoRetrieval":56.07} -{"index":220,"Rank":62,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":56.91,"CmedqaRetrieval":30.33,"CovidRetrieval":66.42,"DuRetrieval":75.76,"EcomRetrieval":50.27,"MedicalRetrieval":42.79,"MMarcoRetrieval":65.46,"T2Retrieval":73.14,"VideoRetrieval":51.11} -{"index":221,"Rank":63,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":54.75,"CmedqaRetrieval":30.73,"CovidRetrieval":61.33,"DuRetrieval":74.69,"EcomRetrieval":45.18,"MedicalRetrieval":48.66,"MMarcoRetrieval":61.06,"T2Retrieval":72.36,"VideoRetrieval":44.02} -{"index":311,"Rank":64,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.0,"CmedqaRetrieval":22.36,"CovidRetrieval":57.21,"DuRetrieval":71.17,"EcomRetrieval":44.49,"MedicalRetrieval":37.92,"MMarcoRetrieval":69.86,"T2Retrieval":69.14,"VideoRetrieval":43.85} -{"index":190,"Rank":65,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.16,"CmedqaRetrieval":20.03,"CovidRetrieval":33.57,"DuRetrieval":56.04,"EcomRetrieval":47.0,"MedicalRetrieval":36.68,"MMarcoRetrieval":69.31,"T2Retrieval":51.57,"VideoRetrieval":55.07} -{"index":279,"Rank":66,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, 
fp32)":1.22,"Average":44.4,"CmedqaRetrieval":18.04,"CovidRetrieval":55.48,"DuRetrieval":59.36,"EcomRetrieval":40.48,"MedicalRetrieval":29.8,"MMarcoRetrieval":55.31,"T2Retrieval":58.67,"VideoRetrieval":38.04} -{"index":53,"Rank":67,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":41.94,"CmedqaRetrieval":15.53,"CovidRetrieval":60.48,"DuRetrieval":51.87,"EcomRetrieval":37.58,"MedicalRetrieval":30.93,"MMarcoRetrieval":45.96,"T2Retrieval":50.52,"VideoRetrieval":42.65} -{"index":277,"Rank":68,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":38.79,"CmedqaRetrieval":15.91,"CovidRetrieval":44.81,"DuRetrieval":52.23,"EcomRetrieval":34.6,"MedicalRetrieval":27.56,"MMarcoRetrieval":44.06,"T2Retrieval":51.67,"VideoRetrieval":39.52} -{"index":48,"Rank":69,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.1,"CmedqaRetrieval":11.35,"CovidRetrieval":39.63,"DuRetrieval":41.16,"EcomRetrieval":32.71,"MedicalRetrieval":20.35,"MMarcoRetrieval":34.62,"T2Retrieval":41.8,"VideoRetrieval":35.15} -{"index":49,"Rank":70,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":22.92,"CmedqaRetrieval":10.15,"CovidRetrieval":28.85,"DuRetrieval":33.41,"EcomRetrieval":9.69,"MedicalRetrieval":14.1,"MMarcoRetrieval":44.62,"T2Retrieval":28.35,"VideoRetrieval":14.17} -{"index":159,"Rank":71,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":7.15,"CmedqaRetrieval":7.22,"CovidRetrieval":10.38,"DuRetrieval":1.44,"EcomRetrieval":4.68,"MedicalRetrieval":4.08,"MMarcoRetrieval":11.0,"T2Retrieval":1.31,"VideoRetrieval":17.05} -{"index":289,"Rank":72,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":4.52,"CmedqaRetrieval":2.22,"CovidRetrieval":2.32,"DuRetrieval":5.86,"EcomRetrieval":10.37,"MedicalRetrieval":2.37,"MMarcoRetrieval":3.64,"T2Retrieval":4.64,"VideoRetrieval":4.77} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.66,"CmedqaRetrieval":28.67,"CmedqaRetrieval (cmn-Hans)":28.66,"CovidRetrieval":75.51,"CovidRetrieval (cmn-Hans)":75.61,"DuRetrieval":85.32,"DuRetrieval (cmn-Hans)":85.3,"EcomRetrieval":54.75,"EcomRetrieval (cmn-Hans)":54.67,"MedicalRetrieval":51.44,"MedicalRetrieval (cmn-Hans)":51.44,"MMarcoRetrieval":79.2,"MMarcoRetrieval (cmn-Hans)":79.2,"T2Retrieval":76.11,"T2Retrieval (cmn-Hans)":76.07,"VideoRetrieval":58.25,"VideoRetrieval (cmn-Hans)":58.28} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.61,"CmedqaRetrieval":27.2,"CmedqaRetrieval (cmn-Hans)":27.2,"CovidRetrieval":73.45,"CovidRetrieval (cmn-Hans)":73.48,"DuRetrieval":81.64,"DuRetrieval (cmn-Hans)":81.66,"EcomRetrieval":54.17,"EcomRetrieval (cmn-Hans)":54.01,"MedicalRetrieval":48.35,"MedicalRetrieval (cmn-Hans)":48.33,"MMarcoRetrieval":76.04,"MMarcoRetrieval (cmn-Hans)":76.01,"T2Retrieval":70.86,"T2Retrieval (cmn-Hans)":70.77,"VideoRetrieval":61.3,"VideoRetrieval (cmn-Hans)":61.26} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.94,"CmedqaRetrieval":24.38,"CmedqaRetrieval (cmn-Hans)":24.36,"CovidRetrieval":72.82,"CovidRetrieval 
(cmn-Hans)":72.82,"DuRetrieval":81.35,"DuRetrieval (cmn-Hans)":81.36,"EcomRetrieval":53.56,"EcomRetrieval (cmn-Hans)":53.53,"MedicalRetrieval":44.84,"MedicalRetrieval (cmn-Hans)":44.84,"MMarcoRetrieval":73.17,"MMarcoRetrieval (cmn-Hans)":73.17,"T2Retrieval":71.39,"T2Retrieval (cmn-Hans)":71.36,"VideoRetrieval":58.09,"VideoRetrieval (cmn-Hans)":58.06} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval 
(cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval 
(cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":46.56,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":84.03,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":87.85,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":68.79,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":65.92,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":79.93,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":86.76,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":75.43,"VideoRetrieval (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"CmedqaRetrieval":43.47,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":80.87,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":86.01,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":66.46,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":61.33,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":73.83,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":83.58,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":69.41,"VideoRetrieval (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":41.61,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":74.7,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":85.07,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":64.25,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":56.51,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":77.69,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":83.71,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":72.35,"VideoRetrieval (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CmedqaRetrieval":41.03,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":75.07,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":84.68,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":65.6,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":58.28,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":81.38,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":84.39,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":73.93,"VideoRetrieval (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CmedqaRetrieval":42.57,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":73.35,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":86.32,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":65.33,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":59.59,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":79.23,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":83.99,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":73.32,"VideoRetrieval (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model 
Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":null,"CmedqaRetrieval":35.11,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":70.14,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":77.28,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":55.71,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":49.8,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":63.48,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":76.43,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":66.19,"VideoRetrieval (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, 
fp32)":1.21,"Average":null,"CmedqaRetrieval":15.53,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":60.48,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":51.87,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":37.58,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":30.93,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":45.96,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":50.52,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":42.65,"VideoRetrieval (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":35.58,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":73.47,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":88.18,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":54.33,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":55.81,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":76.54,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":82.96,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":53.85} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval 
(cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval 
(cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval 
(cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} 
+{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":30.33,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":66.42,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":75.76,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":50.27,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":42.79,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":65.46,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":73.14,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":51.11,"VideoRetrieval (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":30.73,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":61.33,"CovidRetrieval 
(cmn-Hans)":null,"DuRetrieval":74.69,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":45.18,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":48.66,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":61.06,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":72.36,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":44.02,"VideoRetrieval (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval 
(cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":5.49,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":28.6,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":26.34,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":25.42,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":6.68,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":34.78,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":25.32,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":22.04} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":2.58,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":10.79,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":6.62,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":4.01,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":2.3,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":7.46,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":4.82,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":9.38} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":2.03,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":0.8,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":3.03,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":3.7,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":1.76,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":6.21,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":1.6,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":9.79} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":2.0,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":3.7,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":4.92,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":3.94,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":1.71,"MMarcoRetrieval":null,"MMarcoRetrieval 
(cmn-Hans)":7.13,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":2.98,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":8.48} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million 
Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":10.78,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":30.11,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":34.72,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":13.32,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":15.46,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":46.62,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":30.31,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":14.71} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":10.15,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":28.85,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":33.41,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":9.69,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":14.1,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":44.62,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":28.35,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":14.18} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval 
(cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":15.91,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":44.81,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":52.23,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":34.6,"EcomRetrieval 
(cmn-Hans)":null,"MedicalRetrieval":27.56,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":44.06,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":51.67,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":39.52,"VideoRetrieval (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"CmedqaRetrieval":18.04,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":55.48,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":59.36,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":40.48,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":29.8,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":55.31,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":58.67,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":38.04,"VideoRetrieval (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval 
(cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":22.36,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":57.21,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":71.17,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":44.49,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":37.92,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":69.86,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":69.14,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":43.85,"VideoRetrieval (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval 
(cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":47.64,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":86.86,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":88.43,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":66.39,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":61.1,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":80.17,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":80.11,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":74.28,"VideoRetrieval (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} diff --git a/all_data_tasks/13/default.jsonl b/all_data_tasks/13/default.jsonl index c15d2dcc5107cf899a9c7dc6c1650581eb9a28c8..6e3d57e76abad60aac5b18c8fa6b71300babda6b 100644 --- a/all_data_tasks/13/default.jsonl +++ b/all_data_tasks/13/default.jsonl @@ -1,87 +1,112 @@ -{"index":274,"Rank":1,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.66,"AFQMC":53.84,"ATEC":54.44,"BQ":70.57,"LCQMC":74.99,"PAWSX":58.35,"QBQTC":71.45,"STS22 (zh)":74.92,"STSB":82.74} -{"index":17,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":65.33,"AFQMC":72.25,"ATEC":62.62,"BQ":81.25,"LCQMC":73.81,"PAWSX":54.06,"QBQTC":31.37,"STS22 (zh)":66.13,"STSB":81.17} -{"index":142,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.33,"AFQMC":72.25,"ATEC":62.62,"BQ":81.25,"LCQMC":73.81,"PAWSX":54.06,"QBQTC":31.37,"STS22 (zh)":66.13,"STSB":81.17} -{"index":234,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":65.33,"AFQMC":72.25,"ATEC":62.62,"BQ":81.25,"LCQMC":73.81,"PAWSX":54.06,"QBQTC":31.37,"STS22 (zh)":66.13,"STSB":81.17} -{"index":46,"Rank":5,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.89,"AFQMC":71.13,"ATEC":64.64,"BQ":72.3,"LCQMC":78.76,"PAWSX":45.05,"QBQTC":42.62,"STS22 (zh)":63.84,"STSB":80.77} -{"index":207,"Rank":6,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.53,"AFQMC":60.96,"ATEC":58.81,"BQ":75.08,"LCQMC":79.82,"PAWSX":47.42,"QBQTC":45.14,"STS22 (zh)":66.96,"STSB":82.05} -{"index":169,"Rank":7,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.22,"AFQMC":60.72,"ATEC":58.77,"BQ":75.04,"LCQMC":79.75,"PAWSX":45.94,"QBQTC":43.46,"STS22 (zh)":66.71,"STSB":83.34} -{"index":253,"Rank":8,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.5,"AFQMC":61.49,"ATEC":59.2,"BQ":72.88,"LCQMC":79.55,"PAWSX":46.48,"QBQTC":45.91,"STS22 (zh)":63.76,"STSB":78.71} -{"index":38,"Rank":9,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.33,"AFQMC":61.02,"ATEC":59.02,"BQ":72.76,"LCQMC":79.43,"PAWSX":45.92,"QBQTC":44.87,"STS22 (zh)":64.41,"STSB":79.24} -{"index":50,"Rank":10,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.23,"AFQMC":60.89,"ATEC":58.82,"BQ":72.53,"LCQMC":79.19,"PAWSX":45.8,"QBQTC":43.94,"STS22 (zh)":65.18,"STSB":79.48} -{"index":173,"Rank":11,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.49,"AFQMC":59.22,"ATEC":58.43,"BQ":70.44,"LCQMC":78.27,"PAWSX":43.97,"QBQTC":40.02,"STS22 (zh)":66.49,"STSB":83.05} -{"index":116,"Rank":12,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.48,"AFQMC":58.85,"ATEC":58.08,"BQ":71.05,"LCQMC":78.26,"PAWSX":45.36,"QBQTC":38.98,"STS22 (zh)":66.26,"STSB":82.96} -{"index":155,"Rank":13,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.48,"AFQMC":58.85,"ATEC":58.08,"BQ":71.05,"LCQMC":78.26,"PAWSX":45.36,"QBQTC":38.98,"STS22 (zh)":66.26,"STSB":82.96} -{"index":154,"Rank":14,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.46,"AFQMC":59.11,"ATEC":58.19,"BQ":71.07,"LCQMC":78.27,"PAWSX":45.0,"QBQTC":38.69,"STS22 (zh)":66.53,"STSB":82.8} -{"index":12,"Rank":15,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.46,"AFQMC":59.11,"ATEC":58.19,"BQ":71.07,"LCQMC":78.27,"PAWSX":44.98,"QBQTC":38.69,"STS22 (zh)":66.53,"STSB":82.8} -{"index":15,"Rank":16,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":62.32,"AFQMC":58.47,"ATEC":55.46,"BQ":77.59,"LCQMC":76.29,"PAWSX":50.22,"QBQTC":31.82,"STS22 (zh)":67.36,"STSB":81.37} -{"index":276,"Rank":17,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.15,"AFQMC":58.61,"ATEC":58.03,"BQ":70.98,"LCQMC":78.14,"PAWSX":45.59,"QBQTC":38.1,"STS22 (zh)":65.34,"STSB":82.42} -{"index":47,"Rank":18,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":62.13,"AFQMC":59.54,"ATEC":58.63,"BQ":70.31,"LCQMC":78.73,"PAWSX":42.97,"QBQTC":41.03,"STS22 (zh)":66.76,"STSB":79.09} -{"index":129,"Rank":19,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.09,"AFQMC":58.81,"ATEC":57.95,"BQ":70.37,"LCQMC":78.17,"PAWSX":45.51,"QBQTC":37.31,"STS22 (zh)":65.85,"STSB":82.73} -{"index":16,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.96,"AFQMC":58.42,"ATEC":55.65,"BQ":73.85,"LCQMC":75.39,"PAWSX":42.46,"QBQTC":35.15,"STS22 (zh)":67.4,"STSB":79.4} -{"index":233,"Rank":21,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.96,"AFQMC":58.42,"ATEC":55.65,"BQ":73.85,"LCQMC":75.39,"PAWSX":42.46,"QBQTC":35.15,"STS22 (zh)":67.4,"STSB":79.4} -{"index":315,"Rank":22,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.07,"AFQMC":50.8,"ATEC":53.23,"BQ":66.49,"LCQMC":76.6,"PAWSX":47.56,"QBQTC":39.96,"STS22 (zh)":65.78,"STSB":80.14} -{"index":280,"Rank":23,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.39,"AFQMC":50.59,"ATEC":51.29,"BQ":66.07,"LCQMC":75.74,"PAWSX":41.49,"QBQTC":38.11,"STS22 (zh)":69.25,"STSB":82.56} -{"index":199,"Rank":24,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.39,"AFQMC":50.59,"ATEC":51.29,"BQ":66.07,"LCQMC":75.74,"PAWSX":41.49,"QBQTC":38.11,"STS22 (zh)":69.25,"STSB":82.56} -{"index":175,"Rank":25,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.66,"AFQMC":49.95,"ATEC":53.12,"BQ":65.54,"LCQMC":77.48,"PAWSX":36.22,"QBQTC":38.73,"STS22 (zh)":68.87,"STSB":79.37} -{"index":21,"Rank":26,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.66,"AFQMC":49.94,"ATEC":53.11,"BQ":65.54,"LCQMC":77.48,"PAWSX":36.22,"QBQTC":38.74,"STS22 (zh)":68.88,"STSB":79.37} -{"index":20,"Rank":27,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.66,"AFQMC":49.94,"ATEC":53.11,"BQ":65.54,"LCQMC":77.48,"PAWSX":36.22,"QBQTC":38.74,"STS22 (zh)":68.88,"STSB":79.36} -{"index":206,"Rank":28,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.52,"AFQMC":54.85,"ATEC":55.13,"BQ":65.57,"LCQMC":75.55,"PAWSX":37.87,"QBQTC":33.69,"STS22 (zh)":64.32,"STSB":81.19} -{"index":105,"Rank":29,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.41,"AFQMC":54.73,"ATEC":55.13,"BQ":64.89,"LCQMC":75.12,"PAWSX":37.93,"QBQTC":34.21,"STS22 (zh)":64.54,"STSB":80.72} -{"index":174,"Rank":30,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.3,"AFQMC":54.49,"ATEC":54.45,"BQ":62.38,"LCQMC":76.28,"PAWSX":37.97,"QBQTC":37.84,"STS22 (zh)":67.28,"STSB":75.71} -{"index":252,"Rank":31,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.02,"AFQMC":54.17,"ATEC":54.28,"BQ":62.31,"LCQMC":75.81,"PAWSX":38.31,"QBQTC":38.22,"STS22 (zh)":66.66,"STSB":74.43} -{"index":306,"Rank":32,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":57.91,"AFQMC":43.45,"ATEC":48.01,"BQ":65.58,"LCQMC":75.56,"PAWSX":47.62,"QBQTC":40.43,"STS22 (zh)":67.24,"STSB":75.38} -{"index":286,"Rank":33,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.82,"AFQMC":54.58,"ATEC":54.6,"BQ":64.41,"LCQMC":74.4,"PAWSX":38.55,"QBQTC":33.36,"STS22 (zh)":62.98,"STSB":79.68} -{"index":51,"Rank":34,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.08,"AFQMC":46.98,"ATEC":51.35,"BQ":65.08,"LCQMC":76.96,"PAWSX":30.03,"QBQTC":37.5,"STS22 (zh)":69.2,"STSB":79.57} -{"index":172,"Rank":35,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.92,"AFQMC":46.73,"ATEC":51.22,"BQ":64.8,"LCQMC":76.89,"PAWSX":29.7,"QBQTC":37.38,"STS22 (zh)":69.02,"STSB":79.63} -{"index":29,"Rank":36,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.87,"AFQMC":47.17,"ATEC":50.75,"BQ":62.02,"LCQMC":75.95,"PAWSX":30.57,"QBQTC":38.98,"STS22 (zh)":68.68,"STSB":80.87} -{"index":171,"Rank":37,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.54,"AFQMC":51.75,"ATEC":52.82,"BQ":63.29,"LCQMC":75.83,"PAWSX":32.02,"QBQTC":36.47,"STS22 (zh)":67.52,"STSB":72.63} -{"index":27,"Rank":38,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":56.25,"AFQMC":44.36,"ATEC":49.54,"BQ":62.94,"LCQMC":74.33,"PAWSX":33.92,"QBQTC":37.29,"STS22 (zh)":68.94,"STSB":78.7} -{"index":284,"Rank":39,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.96,"AFQMC":49.07,"ATEC":50.83,"BQ":65.5,"LCQMC":74.06,"PAWSX":27.97,"QBQTC":35.19,"STS22 (zh)":63.64,"STSB":81.46} -{"index":251,"Rank":40,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.9,"AFQMC":51.4,"ATEC":52.59,"BQ":62.68,"LCQMC":75.4,"PAWSX":31.58,"QBQTC":36.48,"STS22 (zh)":66.51,"STSB":70.6} -{"index":40,"Rank":41,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.55,"AFQMC":46.7,"ATEC":49.95,"BQ":56.08,"LCQMC":74.96,"PAWSX":33.49,"QBQTC":31.06,"STS22 (zh)":64.51,"STSB":79.63} -{"index":41,"Rank":42,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.35,"AFQMC":46.77,"ATEC":50.18,"BQ":55.25,"LCQMC":74.29,"PAWSX":33.24,"QBQTC":30.92,"STS22 (zh)":64.41,"STSB":79.74} -{"index":23,"Rank":43,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":53.72,"AFQMC":42.4,"ATEC":48.17,"BQ":61.78,"LCQMC":74.45,"PAWSX":20.4,"QBQTC":36.22,"STS22 (zh)":68.01,"STSB":78.31} -{"index":26,"Rank":44,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":53.0,"AFQMC":43.06,"ATEC":48.29,"BQ":60.53,"LCQMC":74.71,"PAWSX":16.64,"QBQTC":35.2,"STS22 (zh)":67.19,"STSB":78.41} -{"index":39,"Rank":45,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.45,"AFQMC":44.25,"ATEC":48.24,"BQ":56.27,"LCQMC":74.09,"PAWSX":12.18,"QBQTC":29.94,"STS22 (zh)":66.06,"STSB":80.54} -{"index":226,"Rank":46,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":50.54,"AFQMC":37.4,"ATEC":42.16,"BQ":70.96,"LCQMC":73.27,"PAWSX":18.55,"QBQTC":29.6,"STS22 (zh)":65.44,"STSB":66.91} -{"index":220,"Rank":47,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":50.47,"AFQMC":35.87,"ATEC":41.27,"BQ":63.81,"LCQMC":74.88,"PAWSX":12.19,"QBQTC":32.07,"STS22 (zh)":66.73,"STSB":76.97} -{"index":221,"Rank":48,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":50.42,"AFQMC":36.53,"ATEC":41.8,"BQ":65.2,"LCQMC":74.2,"PAWSX":15.95,"QBQTC":32.65,"STS22 (zh)":62.91,"STSB":74.16} -{"index":180,"Rank":49,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":50.22,"AFQMC":38.99,"ATEC":42.84,"BQ":50.64,"LCQMC":75.48,"PAWSX":16.81,"QBQTC":31.8,"STS22 (zh)":63.4,"STSB":81.81} -{"index":288,"Rank":50,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.72,"AFQMC":36.69,"ATEC":45.77,"BQ":49.64,"LCQMC":72.72,"PAWSX":12.77,"QBQTC":36.97,"STS22 (zh)":66.72,"STSB":76.51} -{"index":33,"Rank":51,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":49.1,"AFQMC":33.42,"ATEC":43.01,"BQ":55.22,"LCQMC":72.19,"PAWSX":9.26,"QBQTC":35.29,"STS22 (zh)":67.72,"STSB":76.73} -{"index":184,"Rank":52,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.29,"AFQMC":33.02,"ATEC":39.81,"BQ":46.44,"LCQMC":75.95,"PAWSX":14.63,"QBQTC":29.77,"STS22 (zh)":65.64,"STSB":81.08} -{"index":183,"Rank":53,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":46.49,"AFQMC":29.67,"ATEC":37.01,"BQ":45.45,"LCQMC":74.15,"PAWSX":12.14,"QBQTC":28.81,"STS22 (zh)":65.64,"STSB":79.05} -{"index":188,"Rank":54,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.6,"AFQMC":27.98,"ATEC":35.86,"BQ":46.23,"LCQMC":72.8,"PAWSX":15.66,"QBQTC":32.65,"STS22 (zh)":54.08,"STSB":79.53} -{"index":186,"Rank":55,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":45.27,"AFQMC":25.21,"ATEC":35.14,"BQ":43.27,"LCQMC":72.7,"PAWSX":11.01,"QBQTC":30.25,"STS22 (zh)":66.84,"STSB":77.73} -{"index":53,"Rank":56,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":44.97,"AFQMC":24.51,"ATEC":32.45,"BQ":44.22,"LCQMC":69.16,"PAWSX":14.55,"QBQTC":29.51,"STS22 (zh)":65.94,"STSB":79.45} -{"index":48,"Rank":57,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.62,"AFQMC":22.57,"ATEC":30.3,"BQ":40.98,"LCQMC":68.4,"PAWSX":15.08,"QBQTC":27.92,"STS22 (zh)":61.58,"STSB":82.17} -{"index":277,"Rank":58,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":43.41,"AFQMC":26.06,"ATEC":31.93,"BQ":42.67,"LCQMC":70.16,"PAWSX":17.21,"QBQTC":24.62,"STS22 (zh)":55.35,"STSB":79.3} -{"index":311,"Rank":59,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.35,"AFQMC":23.88,"ATEC":29.25,"BQ":45.33,"LCQMC":68.41,"PAWSX":16.55,"QBQTC":30.27,"STS22 (zh)":62.53,"STSB":70.61} -{"index":279,"Rank":60,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, 
fp32)":1.22,"Average":42.78,"AFQMC":22.24,"ATEC":30.84,"BQ":43.33,"LCQMC":66.74,"PAWSX":12.31,"QBQTC":27.2,"STS22 (zh)":66.4,"STSB":73.22} -{"index":190,"Rank":61,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.57,"AFQMC":25.32,"ATEC":33.46,"BQ":43.93,"LCQMC":71.98,"PAWSX":14.23,"QBQTC":32.92,"STS22 (zh)":40.97,"STSB":77.77} -{"index":49,"Rank":62,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.11,"AFQMC":15.69,"ATEC":20.27,"BQ":36.33,"LCQMC":63.3,"PAWSX":12.16,"QBQTC":22.53,"STS22 (zh)":61.75,"STSB":80.84} -{"index":159,"Rank":63,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.48,"AFQMC":17.63,"ATEC":26.18,"BQ":37.66,"LCQMC":50.11,"PAWSX":32.75,"QBQTC":24.48,"STS22 (zh)":52.82,"STSB":50.18} -{"index":289,"Rank":64,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":20.86,"AFQMC":3.76,"ATEC":10.09,"BQ":19.31,"LCQMC":42.15,"PAWSX":6.14,"QBQTC":6.12,"STS22 (zh)":33.7,"STSB":45.6} -{"index":11,"Rank":76,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":49.41,"STSB":""} -{"index":67,"Rank":110,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":54.18,"STSB":""} -{"index":74,"Rank":117,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":60.0,"STSB":""} -{"index":91,"Rank":134,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":25.49,"STSB":""} -{"index":92,"Rank":135,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":6.52,"STSB":""} -{"index":126,"Rank":167,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":64.04,"STSB":""} -{"index":138,"Rank":178,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":58.54,"STSB":""} -{"index":139,"Rank":179,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":66.78,"STSB":""} -{"index":189,"Rank":214,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":30.55,"ATEC":37.35,"BQ":47.17,"LCQMC":75.32,"PAWSX":17.46,"QBQTC":33.7,"STS22 (zh)":"","STSB":81.52} -{"index":191,"Rank":215,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":31.87,"ATEC":38.8,"BQ":47.67,"LCQMC":75.34,"PAWSX":17.25,"QBQTC":34.48,"STS22 (zh)":"","STSB":82.03} 
-{"index":254,"Rank":267,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":63.02,"STSB":""} -{"index":255,"Rank":268,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":33.15,"STSB":""} -{"index":256,"Rank":269,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":44.93,"STSB":""} -{"index":258,"Rank":271,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":16.35,"STSB":""} -{"index":260,"Rank":273,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":4.78,"STSB":""} -{"index":261,"Rank":274,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":54.32,"STSB":""} -{"index":263,"Rank":276,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":27.32,"STSB":""} -{"index":264,"Rank":277,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":31.16,"STSB":""} -{"index":270,"Rank":283,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":32.9,"STSB":""} -{"index":271,"Rank":284,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":30.47,"STSB":""} -{"index":272,"Rank":285,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":33.55,"STSB":""} -{"index":275,"Rank":287,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":32.83,"STSB":""} -{"index":278,"Rank":288,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":63.24,"STSB":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":47.05,"AFQMC":33.02,"AFQMC (cmn-Hans)":33.01,"ATEC":39.81,"ATEC (cmn-Hans)":39.8,"BQ":46.44,"BQ (cmn-Hans)":46.44,"LCQMC":75.95,"LCQMC (cmn-Hans)":75.95,"PAWSX":14.63,"PAWSX (cmn-Hans)":14.63,"QBQTC":29.77,"STSB":81.08,"STSB (cmn-Hans)":81.08} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.9,"AFQMC":29.67,"AFQMC (cmn-Hans)":29.66,"ATEC":37.01,"ATEC 
(cmn-Hans)":37.01,"BQ":45.45,"BQ (cmn-Hans)":45.45,"LCQMC":74.15,"LCQMC (cmn-Hans)":74.15,"PAWSX":12.14,"PAWSX (cmn-Hans)":12.13,"QBQTC":28.81,"STSB":79.05,"STSB (cmn-Hans)":79.04} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":43.1,"AFQMC":25.21,"AFQMC (cmn-Hans)":25.21,"ATEC":35.14,"ATEC (cmn-Hans)":35.14,"BQ":43.27,"BQ (cmn-Hans)":43.27,"LCQMC":72.7,"LCQMC (cmn-Hans)":72.7,"PAWSX":11.01,"PAWSX (cmn-Hans)":11.0,"QBQTC":30.25,"STSB":77.73,"STSB (cmn-Hans)":77.73} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC 
(cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":59.11,"AFQMC (cmn-Hans)":null,"ATEC":58.19,"ATEC (cmn-Hans)":null,"BQ":71.07,"BQ (cmn-Hans)":null,"LCQMC":78.27,"LCQMC (cmn-Hans)":null,"PAWSX":44.98,"PAWSX (cmn-Hans)":null,"QBQTC":38.69,"STSB":82.8,"STSB (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"AFQMC":58.47,"AFQMC (cmn-Hans)":null,"ATEC":55.46,"ATEC (cmn-Hans)":null,"BQ":77.59,"BQ (cmn-Hans)":null,"LCQMC":76.29,"LCQMC (cmn-Hans)":null,"PAWSX":50.22,"PAWSX (cmn-Hans)":null,"QBQTC":31.82,"STSB":81.37,"STSB (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":42.4,"AFQMC (cmn-Hans)":null,"ATEC":48.17,"ATEC (cmn-Hans)":null,"BQ":61.78,"BQ (cmn-Hans)":null,"LCQMC":74.45,"LCQMC (cmn-Hans)":null,"PAWSX":20.4,"PAWSX (cmn-Hans)":null,"QBQTC":36.22,"STSB":78.31,"STSB (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"AFQMC":43.06,"AFQMC (cmn-Hans)":null,"ATEC":48.29,"ATEC (cmn-Hans)":null,"BQ":60.53,"BQ (cmn-Hans)":null,"LCQMC":74.71,"LCQMC (cmn-Hans)":null,"PAWSX":16.64,"PAWSX (cmn-Hans)":null,"QBQTC":35.2,"STSB":78.41,"STSB (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"AFQMC":44.36,"AFQMC (cmn-Hans)":null,"ATEC":49.54,"ATEC (cmn-Hans)":null,"BQ":62.94,"BQ (cmn-Hans)":null,"LCQMC":74.33,"LCQMC (cmn-Hans)":null,"PAWSX":33.92,"PAWSX (cmn-Hans)":null,"QBQTC":37.29,"STSB":78.7,"STSB (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, 
fp32)":0.09,"Average":null,"AFQMC":33.42,"AFQMC (cmn-Hans)":null,"ATEC":43.01,"ATEC (cmn-Hans)":null,"BQ":55.22,"BQ (cmn-Hans)":null,"LCQMC":72.19,"LCQMC (cmn-Hans)":null,"PAWSX":9.26,"PAWSX (cmn-Hans)":null,"QBQTC":35.29,"STSB":76.73,"STSB (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"AFQMC":24.51,"AFQMC (cmn-Hans)":null,"ATEC":32.45,"ATEC (cmn-Hans)":null,"BQ":44.22,"BQ (cmn-Hans)":null,"LCQMC":69.16,"LCQMC (cmn-Hans)":null,"PAWSX":14.55,"PAWSX (cmn-Hans)":null,"QBQTC":29.51,"STSB":79.45,"STSB (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} 
+{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":32.65,"ATEC":null,"ATEC (cmn-Hans)":37.34,"BQ":null,"BQ (cmn-Hans)":38.03,"LCQMC":null,"LCQMC (cmn-Hans)":71.38,"PAWSX":null,"PAWSX (cmn-Hans)":16.4,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":74.11} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ 
(cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ 
(cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC 
(cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":35.87,"AFQMC (cmn-Hans)":null,"ATEC":41.27,"ATEC (cmn-Hans)":null,"BQ":63.81,"BQ (cmn-Hans)":null,"LCQMC":74.88,"LCQMC (cmn-Hans)":null,"PAWSX":12.19,"PAWSX (cmn-Hans)":null,"QBQTC":32.07,"STSB":76.97,"STSB (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":36.53,"AFQMC (cmn-Hans)":null,"ATEC":41.8,"ATEC (cmn-Hans)":null,"BQ":65.2,"BQ (cmn-Hans)":null,"LCQMC":74.2,"LCQMC (cmn-Hans)":null,"PAWSX":15.95,"PAWSX (cmn-Hans)":null,"QBQTC":32.65,"STSB":74.16,"STSB (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC 
(cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":21.02,"ATEC":null,"ATEC (cmn-Hans)":26.61,"BQ":null,"BQ (cmn-Hans)":42.6,"LCQMC":null,"LCQMC (cmn-Hans)":52.19,"PAWSX":null,"PAWSX (cmn-Hans)":10.23,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":68.38} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":7.94,"ATEC":null,"ATEC (cmn-Hans)":12.97,"BQ":null,"BQ (cmn-Hans)":23.31,"LCQMC":null,"LCQMC (cmn-Hans)":21.04,"PAWSX":null,"PAWSX (cmn-Hans)":7.31,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":36.66} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":8.59,"ATEC":null,"ATEC (cmn-Hans)":13.52,"BQ":null,"BQ (cmn-Hans)":23.84,"LCQMC":null,"LCQMC (cmn-Hans)":23.85,"PAWSX":null,"PAWSX (cmn-Hans)":7.21,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":37.8} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":8.01,"ATEC":null,"ATEC (cmn-Hans)":14.03,"BQ":null,"BQ (cmn-Hans)":21.39,"LCQMC":null,"LCQMC (cmn-Hans)":22.84,"PAWSX":null,"PAWSX (cmn-Hans)":6.44,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":37.7} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, 
fp32)":0.5,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":14.3,"ATEC":null,"ATEC (cmn-Hans)":18.42,"BQ":null,"BQ (cmn-Hans)":38.53,"LCQMC":null,"LCQMC (cmn-Hans)":63.96,"PAWSX":null,"PAWSX (cmn-Hans)":10.13,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":78.91} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":15.69,"ATEC":null,"ATEC (cmn-Hans)":20.27,"BQ":null,"BQ (cmn-Hans)":36.33,"LCQMC":null,"LCQMC (cmn-Hans)":63.3,"PAWSX":null,"PAWSX (cmn-Hans)":12.16,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":80.84} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million 
Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":26.06,"AFQMC (cmn-Hans)":null,"ATEC":31.93,"ATEC (cmn-Hans)":null,"BQ":42.67,"BQ (cmn-Hans)":null,"LCQMC":70.16,"LCQMC (cmn-Hans)":null,"PAWSX":17.21,"PAWSX (cmn-Hans)":null,"QBQTC":24.62,"STSB":79.3,"STSB (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"AFQMC":22.24,"AFQMC (cmn-Hans)":null,"ATEC":30.84,"ATEC (cmn-Hans)":null,"BQ":43.33,"BQ (cmn-Hans)":null,"LCQMC":66.74,"LCQMC (cmn-Hans)":null,"PAWSX":12.31,"PAWSX (cmn-Hans)":null,"QBQTC":27.2,"STSB":73.22,"STSB (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} 
+{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":23.88,"AFQMC (cmn-Hans)":null,"ATEC":29.25,"ATEC (cmn-Hans)":null,"BQ":45.33,"BQ (cmn-Hans)":null,"LCQMC":68.41,"LCQMC (cmn-Hans)":null,"PAWSX":16.55,"PAWSX (cmn-Hans)":null,"QBQTC":30.27,"STSB":70.61,"STSB (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX 
(cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":50.8,"AFQMC (cmn-Hans)":null,"ATEC":53.23,"ATEC (cmn-Hans)":null,"BQ":66.49,"BQ (cmn-Hans)":null,"LCQMC":76.6,"LCQMC (cmn-Hans)":null,"PAWSX":47.56,"PAWSX (cmn-Hans)":null,"QBQTC":39.96,"STSB":80.14,"STSB (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} diff --git a/all_data_tasks/14/default.jsonl b/all_data_tasks/14/default.jsonl index 2908d739c66e9b8095ac945aa6ccbb65431d3b30..79d390547aa179d45176af4da59200f9fc3d82a1 100644 --- a/all_data_tasks/14/default.jsonl +++ b/all_data_tasks/14/default.jsonl @@ -1,24 +1,24 @@ -{"index":9,"Rank":1,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","BornholmBitextMining":47.37} -{"index":13,"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"BornholmBitextMining":46.4} -{"index":14,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"BornholmBitextMining":44.16} -{"index":15,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"BornholmBitextMining":43.89} -{"index":12,"Rank":5,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"BornholmBitextMining":40.27} -{"index":11,"Rank":6,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"BornholmBitextMining":40.15} -{"index":10,"Rank":7,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"BornholmBitextMining":40.09} -{"index":1,"Rank":8,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","BornholmBitextMining":37.97} -{"index":20,"Rank":9,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"BornholmBitextMining":29.68} -{"index":8,"Rank":10,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"BornholmBitextMining":15.93} -{"index":4,"Rank":11,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"BornholmBitextMining":14.08} -{"index":7,"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"BornholmBitextMining":11.65} -{"index":5,"Rank":13,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"BornholmBitextMining":9.88} -{"index":2,"Rank":14,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"BornholmBitextMining":6.6} -{"index":22,"Rank":15,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"BornholmBitextMining":6.34} -{"index":17,"Rank":16,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"BornholmBitextMining":6.08} -{"index":6,"Rank":17,"Model":"nb-bert-large<\/a>","Model Size (Million 
Parameters)":355,"Memory Usage (GB, fp32)":1.32,"BornholmBitextMining":4.53} -{"index":23,"Rank":18,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"BornholmBitextMining":4.42} -{"index":18,"Rank":19,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"BornholmBitextMining":2.9} -{"index":16,"Rank":20,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"BornholmBitextMining":1.44} -{"index":3,"Rank":21,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"BornholmBitextMining":0.85} -{"index":0,"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"BornholmBitextMining":""} -{"index":19,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"BornholmBitextMining":""} -{"index":21,"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"BornholmBitextMining":""} +{"Rank":1,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":40.52,"BornholmBitextMining":43.89,"BornholmBitextMining (dan-Latn)":37.15} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.81,"BornholmBitextMining":46.4,"BornholmBitextMining (dan-Latn)":33.22} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":36.88,"BornholmBitextMining":44.16,"BornholmBitextMining (dan-Latn)":29.61} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.68,"BornholmBitextMining":29.68,"BornholmBitextMining (dan-Latn)":29.68} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":45.13} +{"Rank":6,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","BornholmBitextMining":6.6,"BornholmBitextMining (dan-Latn)":""} +{"Rank":7,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","BornholmBitextMining":0.85,"BornholmBitextMining (dan-Latn)":""} +{"Rank":8,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","BornholmBitextMining":14.08,"BornholmBitextMining (dan-Latn)":""} +{"Rank":9,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","BornholmBitextMining":9.88,"BornholmBitextMining (dan-Latn)":""} +{"Rank":10,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","BornholmBitextMining":4.53,"BornholmBitextMining (dan-Latn)":""} +{"Rank":11,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","BornholmBitextMining":11.65,"BornholmBitextMining (dan-Latn)":""} +{"Rank":12,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","BornholmBitextMining":40.09,"BornholmBitextMining (dan-Latn)":""} 
+{"Rank":13,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","BornholmBitextMining":40.15,"BornholmBitextMining (dan-Latn)":""} +{"Rank":14,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","BornholmBitextMining":40.27,"BornholmBitextMining (dan-Latn)":""} +{"Rank":15,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","BornholmBitextMining":1.44,"BornholmBitextMining (dan-Latn)":""} +{"Rank":16,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","BornholmBitextMining":6.08,"BornholmBitextMining (dan-Latn)":""} +{"Rank":17,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","BornholmBitextMining":2.9,"BornholmBitextMining (dan-Latn)":""} +{"Rank":18,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":45.63} +{"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":35.25} +{"Rank":20,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":27.44} +{"Rank":21,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":19.67} +{"Rank":22,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":18.18} +{"Rank":23,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","BornholmBitextMining":6.34,"BornholmBitextMining (dan-Latn)":""} +{"Rank":24,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","BornholmBitextMining":4.42,"BornholmBitextMining (dan-Latn)":""} diff --git a/all_data_tasks/15/default.jsonl b/all_data_tasks/15/default.jsonl index 85cbd1ad9174d578900953c618f3758347b3407a..0db151fa509fb8152faba7dc76eaa5be2458dc72 100644 --- a/all_data_tasks/15/default.jsonl +++ b/all_data_tasks/15/default.jsonl @@ -1,47 +1,32 @@ -{"index":17,"Rank":1,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":61.62,"AngryTweetsClassification":54.42,"DKHateClassification":63.19,"DanishPoliticalCommentsClassification":37.76,"LccSentimentClassification":58.07,"MassiveIntentClassification (da)":65.83,"MassiveScenarioClassification (da)":71.61,"NordicLangClassification":75.98,"ScalaDaClassification":66.09} -{"index":24,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":59.94,"AngryTweetsClassification":54.95,"DKHateClassification":66.02,"DanishPoliticalCommentsClassification":38.27,"LccSentimentClassification":59.6,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":82.29,"ScalaDaClassification":50.77} -{"index":16,"Rank":3,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million 
Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":59.16,"AngryTweetsClassification":53.8,"DKHateClassification":60.09,"DanishPoliticalCommentsClassification":36.6,"LccSentimentClassification":57.33,"MassiveIntentClassification (da)":60.55,"MassiveScenarioClassification (da)":64.16,"NordicLangClassification":77.68,"ScalaDaClassification":63.08} -{"index":13,"Rank":4,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.9,"AngryTweetsClassification":52.14,"DKHateClassification":62.13,"DanishPoliticalCommentsClassification":35.04,"LccSentimentClassification":56.27,"MassiveIntentClassification (da)":57.03,"MassiveScenarioClassification (da)":60.43,"NordicLangClassification":85.27,"ScalaDaClassification":62.85} -{"index":23,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.61,"AngryTweetsClassification":54.65,"DKHateClassification":63.53,"DanishPoliticalCommentsClassification":36.69,"LccSentimentClassification":59.67,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":75.94,"ScalaDaClassification":50.79} -{"index":45,"Rank":6,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":58.31,"AngryTweetsClassification":54.28,"DKHateClassification":59.3,"DanishPoliticalCommentsClassification":39.81,"LccSentimentClassification":58.0,"MassiveIntentClassification (da)":54.68,"MassiveScenarioClassification (da)":59.56,"NordicLangClassification":74.25,"ScalaDaClassification":66.59} -{"index":12,"Rank":7,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":57.68,"AngryTweetsClassification":52.14,"DKHateClassification":61.73,"DanishPoliticalCommentsClassification":34.84,"LccSentimentClassification":51.4,"MassiveIntentClassification (da)":56.69,"MassiveScenarioClassification (da)":61.93,"NordicLangClassification":84.69,"ScalaDaClassification":57.99} -{"index":30,"Rank":8,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":56.34,"AngryTweetsClassification":52.48,"DKHateClassification":58.78,"DanishPoliticalCommentsClassification":34.14,"LccSentimentClassification":54.07,"MassiveIntentClassification (da)":53.16,"MassiveScenarioClassification (da)":57.17,"NordicLangClassification":82.67,"ScalaDaClassification":58.25} -{"index":26,"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":56.12,"AngryTweetsClassification":53.57,"DKHateClassification":60.73,"DanishPoliticalCommentsClassification":34.38,"LccSentimentClassification":57.87,"MassiveIntentClassification (da)":54.63,"MassiveScenarioClassification (da)":62.34,"NordicLangClassification":75.15,"ScalaDaClassification":50.3} -{"index":18,"Rank":10,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.03,"AngryTweetsClassification":49.62,"DKHateClassification":69.97,"DanishPoliticalCommentsClassification":37.59,"LccSentimentClassification":54.27,"MassiveIntentClassification (da)":62.03,"MassiveScenarioClassification (da)":67.76,"NordicLangClassification":48.4,"ScalaDaClassification":50.63} -{"index":31,"Rank":11,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, 
fp32)":1.37,"Average":54.21,"AngryTweetsClassification":49.04,"DKHateClassification":62.71,"DanishPoliticalCommentsClassification":33.53,"LccSentimentClassification":46.93,"MassiveIntentClassification (da)":45.98,"MassiveScenarioClassification (da)":50.51,"NordicLangClassification":84.25,"ScalaDaClassification":60.72} -{"index":46,"Rank":12,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":52.14,"AngryTweetsClassification":52.41,"DKHateClassification":56.78,"DanishPoliticalCommentsClassification":34.03,"LccSentimentClassification":52.27,"MassiveIntentClassification (da)":41.06,"MassiveScenarioClassification (da)":43.91,"NordicLangClassification":79.39,"ScalaDaClassification":57.3} -{"index":20,"Rank":13,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":47.39,"AngryTweetsClassification":46.14,"DKHateClassification":58.72,"DanishPoliticalCommentsClassification":28.67,"LccSentimentClassification":42.13,"MassiveIntentClassification (da)":42.29,"MassiveScenarioClassification (da)":52.95,"NordicLangClassification":58.3,"ScalaDaClassification":49.9} -{"index":19,"Rank":14,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":47.02,"AngryTweetsClassification":45.06,"DKHateClassification":58.51,"DanishPoliticalCommentsClassification":28.43,"LccSentimentClassification":37.47,"MassiveIntentClassification (da)":44.25,"MassiveScenarioClassification (da)":52.99,"NordicLangClassification":59.34,"ScalaDaClassification":50.08} -{"index":8,"Rank":15,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":46.67,"AngryTweetsClassification":44.46,"DKHateClassification":59.36,"DanishPoliticalCommentsClassification":28.32,"LccSentimentClassification":47.2,"MassiveIntentClassification (da)":42.84,"MassiveScenarioClassification (da)":49.64,"NordicLangClassification":51.45,"ScalaDaClassification":50.12} -{"index":29,"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":46.33,"AngryTweetsClassification":47.91,"DKHateClassification":59.45,"DanishPoliticalCommentsClassification":31.89,"LccSentimentClassification":47.93,"MassiveIntentClassification (da)":26.3,"MassiveScenarioClassification (da)":28.93,"NordicLangClassification":57.82,"ScalaDaClassification":70.41} -{"index":22,"Rank":17,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":45.66,"AngryTweetsClassification":43.6,"DKHateClassification":57.57,"DanishPoliticalCommentsClassification":28.37,"LccSentimentClassification":40.27,"MassiveIntentClassification (da)":41.89,"MassiveScenarioClassification (da)":49.93,"NordicLangClassification":53.47,"ScalaDaClassification":50.15} -{"index":6,"Rank":18,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":45.58,"AngryTweetsClassification":44.58,"DKHateClassification":55.53,"DanishPoliticalCommentsClassification":28.97,"LccSentimentClassification":41.2,"MassiveIntentClassification (da)":37.98,"MassiveScenarioClassification (da)":40.44,"NordicLangClassification":62.45,"ScalaDaClassification":53.53} -{"index":34,"Rank":19,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":44.46,"AngryTweetsClassification":42.49,"DKHateClassification":55.05,"DanishPoliticalCommentsClassification":26.96,"LccSentimentClassification":38.47,"MassiveIntentClassification (da)":40.99,"MassiveScenarioClassification (da)":47.01,"NordicLangClassification":54.71,"ScalaDaClassification":50.03} -{"index":7,"Rank":20,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":33.61,"AngryTweetsClassification":40.52,"DKHateClassification":52.28,"DanishPoliticalCommentsClassification":25.17,"LccSentimentClassification":36.67,"MassiveIntentClassification (da)":6.51,"MassiveScenarioClassification (da)":11.5,"NordicLangClassification":44.53,"ScalaDaClassification":51.66} -{"index":0,"Rank":21,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":45.61,"MassiveScenarioClassification (da)":54.87,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":1,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":46.17,"DKHateClassification":55.9,"DanishPoliticalCommentsClassification":28.33,"LccSentimentClassification":42.27,"MassiveIntentClassification (da)":40.37,"MassiveScenarioClassification (da)":49.35,"NordicLangClassification":"","ScalaDaClassification":50.11} -{"index":2,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":31.47,"MassiveScenarioClassification (da)":39.79,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":3,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":35.53,"MassiveScenarioClassification (da)":42.65,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":4,"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","ScalaDaClassification":""} -{"index":5,"Rank":26,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":56.08,"DKHateClassification":"","DanishPoliticalCommentsClassification":40.88,"LccSentimentClassification":59.6,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":61.0,"ScalaDaClassification":50.43} -{"index":9,"Rank":27,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":41.0,"MassiveScenarioClassification 
(da)":51.92,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":10,"Rank":28,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":44.18,"MassiveScenarioClassification (da)":45.44,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":11,"Rank":29,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":43.56,"MassiveScenarioClassification (da)":44.75,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":14,"Rank":30,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":55.06,"MassiveScenarioClassification (da)":64.26,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":15,"Rank":31,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":51.23,"MassiveScenarioClassification (da)":53.52,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":21,"Rank":32,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":72.53,"MassiveScenarioClassification (da)":76.76,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":25,"Rank":33,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":71.81,"MassiveScenarioClassification (da)":77.61,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":27,"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":49.54,"MassiveScenarioClassification (da)":52.53,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":28,"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":46.93,"MassiveScenarioClassification (da)":49.11,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":32,"Rank":36,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":58.25,"MassiveScenarioClassification (da)":65.24,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":33,"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":44.43,"MassiveScenarioClassification (da)":49.47,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":35,"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","ScalaDaClassification":""} -{"index":36,"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":38.47,"MassiveScenarioClassification (da)":39.93,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":37,"Rank":40,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":52.33,"MassiveScenarioClassification (da)":62.55,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":38,"Rank":41,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":45.3,"MassiveScenarioClassification (da)":54.88,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":39,"Rank":42,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":48.42,"MassiveScenarioClassification (da)":57.28,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":40,"Rank":43,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":38.82,"MassiveScenarioClassification (da)":48.36,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":41,"Rank":44,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":41.76,"MassiveScenarioClassification 
(da)":51.44,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":42,"Rank":45,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":47.69,"MassiveScenarioClassification (da)":55.79,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":43,"Rank":46,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":32.25,"MassiveScenarioClassification (da)":41.14,"NordicLangClassification":"","ScalaDaClassification":""} -{"index":44,"Rank":47,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":56.74,"MassiveScenarioClassification (da)":63.07,"NordicLangClassification":"","ScalaDaClassification":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":59.86,"AngryTweetsClassification":54.95,"AngryTweetsClassification (dan-Latn)":57.69,"DKHateClassification":66.02,"DanishPoliticalCommentsClassification":38.27,"DanishPoliticalCommentsClassification (dan-Latn)":39.43,"LccSentimentClassification":59.6,"LccSentimentClassification (dan-Latn)":61.53,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":82.29,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":80.15,"ScalaDaClassification":50.77} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.13,"AngryTweetsClassification":54.65,"AngryTweetsClassification (dan-Latn)":56.28,"DKHateClassification":63.53,"DanishPoliticalCommentsClassification":36.69,"DanishPoliticalCommentsClassification (dan-Latn)":36.41,"LccSentimentClassification":59.67,"LccSentimentClassification (dan-Latn)":60.13,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":75.94,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":75.85,"ScalaDaClassification":50.79} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.9,"AngryTweetsClassification":53.57,"AngryTweetsClassification (dan-Latn)":56.27,"DKHateClassification":60.73,"DanishPoliticalCommentsClassification":34.38,"DanishPoliticalCommentsClassification (dan-Latn)":34.82,"LccSentimentClassification":57.87,"LccSentimentClassification (dan-Latn)":58.6,"MassiveIntentClassification (da)":54.63,"MassiveScenarioClassification (da)":62.34,"NordicLangClassification":75.15,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":72.15,"ScalaDaClassification":50.3} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":43.18,"AngryTweetsClassification":42.49,"AngryTweetsClassification 
(dan-Latn)":42.48,"DKHateClassification":55.05,"DanishPoliticalCommentsClassification":26.96,"DanishPoliticalCommentsClassification (dan-Latn)":26.7,"LccSentimentClassification":38.47,"LccSentimentClassification (dan-Latn)":38.53,"MassiveIntentClassification (da)":40.99,"MassiveScenarioClassification (da)":47.01,"NordicLangClassification":54.71,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.7,"ScalaDaClassification":50.03} +{"Rank":5,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":45.61,"MassiveScenarioClassification (da)":54.87,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":6,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":54.68,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":37.69,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":57.2,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":63.6,"ScalaDaClassification":""} +{"Rank":7,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","AngryTweetsClassification":44.58,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":55.53,"DanishPoliticalCommentsClassification":28.97,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":41.2,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":37.98,"MassiveScenarioClassification (da)":40.44,"NordicLangClassification":62.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":53.53} +{"Rank":8,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","AngryTweetsClassification":40.52,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":52.28,"DanishPoliticalCommentsClassification":25.17,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":36.67,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":6.51,"MassiveScenarioClassification (da)":11.5,"NordicLangClassification":44.53,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":51.66} +{"Rank":9,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","AngryTweetsClassification":44.46,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":59.36,"DanishPoliticalCommentsClassification":28.32,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":47.2,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":42.84,"MassiveScenarioClassification 
(da)":49.64,"NordicLangClassification":51.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":50.12} +{"Rank":10,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AngryTweetsClassification":52.14,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":61.73,"DanishPoliticalCommentsClassification":34.84,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":51.4,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":56.69,"MassiveScenarioClassification (da)":61.93,"NordicLangClassification":84.69,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":57.99} +{"Rank":11,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","AngryTweetsClassification":52.14,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":62.13,"DanishPoliticalCommentsClassification":35.04,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":56.27,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":57.03,"MassiveScenarioClassification (da)":60.43,"NordicLangClassification":85.27,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":62.85} +{"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","AngryTweetsClassification":53.8,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":60.09,"DanishPoliticalCommentsClassification":36.6,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":57.33,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":60.55,"MassiveScenarioClassification (da)":64.16,"NordicLangClassification":77.68,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":63.08} +{"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":45.06,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":58.51,"DanishPoliticalCommentsClassification":28.43,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":37.47,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":44.25,"MassiveScenarioClassification (da)":52.99,"NordicLangClassification":59.34,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":50.08} +{"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","AngryTweetsClassification":46.14,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":58.72,"DanishPoliticalCommentsClassification":28.67,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":42.13,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":42.29,"MassiveScenarioClassification (da)":52.95,"NordicLangClassification":58.3,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":49.9} +{"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AngryTweetsClassification":43.6,"AngryTweetsClassification 
(dan-Latn)":"","DKHateClassification":57.57,"DanishPoliticalCommentsClassification":28.37,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":40.27,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":41.89,"MassiveScenarioClassification (da)":49.93,"NordicLangClassification":53.47,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":50.15} +{"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AngryTweetsClassification":47.91,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":59.45,"DanishPoliticalCommentsClassification":31.89,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":47.93,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":26.3,"MassiveScenarioClassification (da)":28.93,"NordicLangClassification":57.82,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":70.41} +{"Rank":17,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","AngryTweetsClassification":52.48,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":58.78,"DanishPoliticalCommentsClassification":34.14,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":54.07,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":53.16,"MassiveScenarioClassification (da)":57.17,"NordicLangClassification":82.67,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":58.25} +{"Rank":18,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","AngryTweetsClassification":49.04,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":62.71,"DanishPoliticalCommentsClassification":33.53,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":46.93,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":45.98,"MassiveScenarioClassification (da)":50.51,"NordicLangClassification":84.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":60.72} +{"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":51.11,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":38.34,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":50.07,"MassiveIntentClassification (da)":58.25,"MassiveScenarioClassification (da)":65.24,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":35.39,"ScalaDaClassification":""} +{"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":42.87,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":27.07,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":41.93,"MassiveIntentClassification (da)":44.43,"MassiveScenarioClassification (da)":49.47,"NordicLangClassification":"","NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.17,"ScalaDaClassification":""} +{"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":44.13,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":28.31,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":39.27,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":50.15,"ScalaDaClassification":""} +{"Rank":22,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":38.47,"MassiveScenarioClassification (da)":39.93,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":52.33,"MassiveScenarioClassification (da)":62.55,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":45.3,"MassiveScenarioClassification (da)":54.88,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":25,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":48.42,"MassiveScenarioClassification (da)":57.28,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":26,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification 
(dan-Latn)":50.9,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":37.58,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":54.53,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":42.52,"ScalaDaClassification":""} +{"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":54.84,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":40.96,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":58.4,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":41.57,"ScalaDaClassification":""} +{"Rank":28,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":38.82,"MassiveScenarioClassification (da)":48.36,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":41.76,"MassiveScenarioClassification (da)":51.44,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":30,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":47.69,"MassiveScenarioClassification (da)":55.79,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":31,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","AngryTweetsClassification":54.28,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":59.3,"DanishPoliticalCommentsClassification":39.81,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":58.0,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":54.68,"MassiveScenarioClassification (da)":59.56,"NordicLangClassification":74.25,"NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":66.59} +{"Rank":32,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AngryTweetsClassification":52.41,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":56.78,"DanishPoliticalCommentsClassification":34.03,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":52.27,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":41.06,"MassiveScenarioClassification (da)":43.91,"NordicLangClassification":79.39,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":57.3} diff --git a/all_data_tasks/16/default.jsonl b/all_data_tasks/16/default.jsonl index 2e65c4d5b9bc3c1c2570e674b0ab0a75d9a90aec..c2c67a443c29a551d3ed5f619dbb294c2a64b2a3 100644 --- a/all_data_tasks/16/default.jsonl +++ b/all_data_tasks/16/default.jsonl @@ -1,77 +1,57 @@ -{"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":81.76,"AmazonReviewsClassification (fr)":55.53,"MasakhaNEWSClassification (fra)":82.61,"MassiveIntentClassification (fr)":81.65,"MassiveScenarioClassification (fr)":86.64,"MTOPDomainClassification (fr)":96.69,"MTOPIntentClassification (fr)":87.47} -{"index":56,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.76,"AmazonReviewsClassification (fr)":55.53,"MasakhaNEWSClassification (fra)":82.61,"MassiveIntentClassification (fr)":81.65,"MassiveScenarioClassification (fr)":86.64,"MTOPDomainClassification (fr)":96.69,"MTOPIntentClassification (fr)":87.47} -{"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.76,"AmazonReviewsClassification (fr)":55.53,"MasakhaNEWSClassification (fra)":82.61,"MassiveIntentClassification (fr)":81.65,"MassiveScenarioClassification (fr)":86.64,"MTOPDomainClassification (fr)":96.69,"MTOPIntentClassification (fr)":87.47} -{"index":10,"Rank":4,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.62,"AmazonReviewsClassification (fr)":55.19,"MasakhaNEWSClassification (fra)":82.49,"MassiveIntentClassification (fr)":79.6,"MassiveScenarioClassification (fr)":82.18,"MTOPDomainClassification (fr)":97.2,"MTOPIntentClassification (fr)":93.07} -{"index":55,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.02,"AmazonReviewsClassification (fr)":53.47,"MasakhaNEWSClassification (fra)":85.19,"MassiveIntentClassification (fr)":76.65,"MassiveScenarioClassification (fr)":79.1,"MTOPDomainClassification (fr)":93.48,"MTOPIntentClassification (fr)":80.23} -{"index":8,"Rank":6,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.02,"AmazonReviewsClassification (fr)":53.47,"MasakhaNEWSClassification (fra)":85.19,"MassiveIntentClassification (fr)":76.65,"MassiveScenarioClassification (fr)":79.1,"MTOPDomainClassification (fr)":93.48,"MTOPIntentClassification (fr)":80.23} -{"index":29,"Rank":7,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.02,"AmazonReviewsClassification (fr)":42.08,"MasakhaNEWSClassification 
(fra)":81.52,"MassiveIntentClassification (fr)":67.4,"MassiveScenarioClassification (fr)":71.29,"MTOPDomainClassification (fr)":89.26,"MTOPIntentClassification (fr)":68.55} -{"index":81,"Rank":8,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.27,"AmazonReviewsClassification (fr)":43.76,"MasakhaNEWSClassification (fra)":81.52,"MassiveIntentClassification (fr)":65.42,"MassiveScenarioClassification (fr)":71.11,"MTOPDomainClassification (fr)":89.38,"MTOPIntentClassification (fr)":64.45} -{"index":22,"Rank":9,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.16,"AmazonReviewsClassification (fr)":43.42,"MasakhaNEWSClassification (fra)":80.57,"MassiveIntentClassification (fr)":66.81,"MassiveScenarioClassification (fr)":72.99,"MTOPDomainClassification (fr)":88.33,"MTOPIntentClassification (fr)":62.85} -{"index":0,"Rank":10,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.61,"AmazonReviewsClassification (fr)":41.59,"MasakhaNEWSClassification (fra)":81.4,"MassiveIntentClassification (fr)":62.83,"MassiveScenarioClassification (fr)":69.71,"MTOPDomainClassification (fr)":90.05,"MTOPIntentClassification (fr)":66.09} -{"index":4,"Rank":11,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.56,"AmazonReviewsClassification (fr)":43.36,"MasakhaNEWSClassification (fra)":74.81,"MassiveIntentClassification (fr)":68.06,"MassiveScenarioClassification (fr)":74.29,"MTOPDomainClassification (fr)":90.33,"MTOPIntentClassification (fr)":60.52} -{"index":51,"Rank":12,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.48,"AmazonReviewsClassification (fr)":36.62,"MasakhaNEWSClassification (fra)":80.4,"MassiveIntentClassification (fr)":65.86,"MassiveScenarioClassification (fr)":71.6,"MTOPDomainClassification (fr)":88.7,"MTOPIntentClassification (fr)":67.69} -{"index":3,"Rank":13,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"AmazonReviewsClassification (fr)":41.98,"MasakhaNEWSClassification (fra)":76.42,"MassiveIntentClassification (fr)":66.94,"MassiveScenarioClassification (fr)":72.78,"MTOPDomainClassification (fr)":90.12,"MTOPIntentClassification (fr)":62.44} -{"index":45,"Rank":14,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":68.39,"AmazonReviewsClassification (fr)":41.91,"MasakhaNEWSClassification (fra)":79.38,"MassiveIntentClassification (fr)":69.34,"MassiveScenarioClassification (fr)":73.87,"MTOPDomainClassification (fr)":86.41,"MTOPIntentClassification (fr)":59.43} -{"index":50,"Rank":15,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.57,"AmazonReviewsClassification (fr)":42.33,"MasakhaNEWSClassification (fra)":70.52,"MassiveIntentClassification (fr)":66.7,"MassiveScenarioClassification (fr)":74.58,"MTOPDomainClassification (fr)":90.39,"MTOPIntentClassification (fr)":60.88} -{"index":2,"Rank":16,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.44,"AmazonReviewsClassification (fr)":42.15,"MasakhaNEWSClassification (fra)":82.13,"MassiveIntentClassification (fr)":63.08,"MassiveScenarioClassification (fr)":70.15,"MTOPDomainClassification 
(fr)":87.68,"MTOPIntentClassification (fr)":59.44} -{"index":52,"Rank":17,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.4,"AmazonReviewsClassification (fr)":36.48,"MasakhaNEWSClassification (fra)":73.18,"MassiveIntentClassification (fr)":66.3,"MassiveScenarioClassification (fr)":71.7,"MTOPDomainClassification (fr)":88.96,"MTOPIntentClassification (fr)":67.76} -{"index":72,"Rank":18,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":67.36,"AmazonReviewsClassification (fr)":46.09,"MasakhaNEWSClassification (fra)":79.1,"MassiveIntentClassification (fr)":65.91,"MassiveScenarioClassification (fr)":68.53,"MTOPDomainClassification (fr)":86.2,"MTOPIntentClassification (fr)":58.33} -{"index":53,"Rank":19,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.11,"AmazonReviewsClassification (fr)":35.11,"MasakhaNEWSClassification (fra)":75.17,"MassiveIntentClassification (fr)":66.48,"MassiveScenarioClassification (fr)":71.47,"MTOPDomainClassification (fr)":88.52,"MTOPIntentClassification (fr)":65.93} -{"index":15,"Rank":20,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.08,"AmazonReviewsClassification (fr)":41.89,"MasakhaNEWSClassification (fra)":83.06,"MassiveIntentClassification (fr)":62.94,"MassiveScenarioClassification (fr)":67.29,"MTOPDomainClassification (fr)":86.23,"MTOPIntentClassification (fr)":61.07} -{"index":44,"Rank":21,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":66.8,"AmazonReviewsClassification (fr)":40.94,"MasakhaNEWSClassification (fra)":79.69,"MassiveIntentClassification (fr)":67.95,"MassiveScenarioClassification (fr)":71.89,"MTOPDomainClassification (fr)":84.79,"MTOPIntentClassification (fr)":55.51} -{"index":21,"Rank":22,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.74,"AmazonReviewsClassification (fr)":40.35,"MasakhaNEWSClassification (fra)":77.44,"MassiveIntentClassification (fr)":64.99,"MassiveScenarioClassification (fr)":71.72,"MTOPDomainClassification (fr)":86.83,"MTOPIntentClassification (fr)":59.13} -{"index":23,"Rank":23,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.54,"AmazonReviewsClassification (fr)":44.11,"MasakhaNEWSClassification (fra)":69.81,"MassiveIntentClassification (fr)":66.14,"MassiveScenarioClassification (fr)":72.74,"MTOPDomainClassification (fr)":87.82,"MTOPIntentClassification (fr)":58.63} -{"index":77,"Rank":24,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.37,"AmazonReviewsClassification (fr)":35.09,"MasakhaNEWSClassification (fra)":72.04,"MassiveIntentClassification (fr)":65.8,"MassiveScenarioClassification (fr)":73.47,"MTOPDomainClassification (fr)":88.19,"MTOPIntentClassification (fr)":63.64} -{"index":54,"Rank":25,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.96,"AmazonReviewsClassification (fr)":34.62,"MasakhaNEWSClassification (fra)":74.29,"MassiveIntentClassification (fr)":65.67,"MassiveScenarioClassification (fr)":71.61,"MTOPDomainClassification 
(fr)":86.97,"MTOPIntentClassification (fr)":62.59} -{"index":80,"Rank":26,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.92,"AmazonReviewsClassification (fr)":43.51,"MasakhaNEWSClassification (fra)":72.61,"MassiveIntentClassification (fr)":65.15,"MassiveScenarioClassification (fr)":69.94,"MTOPDomainClassification (fr)":85.33,"MTOPIntentClassification (fr)":59.01} -{"index":36,"Rank":27,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":65.82,"AmazonReviewsClassification (fr)":37.97,"MasakhaNEWSClassification (fra)":80.62,"MassiveIntentClassification (fr)":62.65,"MassiveScenarioClassification (fr)":69.29,"MTOPDomainClassification (fr)":85.74,"MTOPIntentClassification (fr)":58.62} -{"index":24,"Rank":28,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.18,"AmazonReviewsClassification (fr)":36.48,"MasakhaNEWSClassification (fra)":78.44,"MassiveIntentClassification (fr)":64.57,"MassiveScenarioClassification (fr)":69.04,"MTOPDomainClassification (fr)":84.19,"MTOPIntentClassification (fr)":58.35} -{"index":71,"Rank":29,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":65.09,"AmazonReviewsClassification (fr)":43.52,"MasakhaNEWSClassification (fra)":80.09,"MassiveIntentClassification (fr)":60.99,"MassiveScenarioClassification (fr)":66.42,"MTOPDomainClassification (fr)":85.14,"MTOPIntentClassification (fr)":54.39} -{"index":68,"Rank":30,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.64,"AmazonReviewsClassification (fr)":39.0,"MasakhaNEWSClassification (fra)":78.1,"MassiveIntentClassification (fr)":61.88,"MassiveScenarioClassification (fr)":67.9,"MTOPDomainClassification (fr)":81.21,"MTOPIntentClassification (fr)":59.76} -{"index":57,"Rank":31,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":64.6,"AmazonReviewsClassification (fr)":38.52,"MasakhaNEWSClassification (fra)":77.39,"MassiveIntentClassification (fr)":60.47,"MassiveScenarioClassification (fr)":65.1,"MTOPDomainClassification (fr)":84.14,"MTOPIntentClassification (fr)":62.01} -{"index":30,"Rank":32,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":64.57,"AmazonReviewsClassification (fr)":34.79,"MasakhaNEWSClassification (fra)":79.29,"MassiveIntentClassification (fr)":59.41,"MassiveScenarioClassification (fr)":65.29,"MTOPDomainClassification (fr)":85.52,"MTOPIntentClassification (fr)":63.12} -{"index":76,"Rank":33,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.41,"AmazonReviewsClassification (fr)":33.51,"MasakhaNEWSClassification (fra)":82.06,"MassiveIntentClassification (fr)":61.19,"MassiveScenarioClassification (fr)":70.22,"MTOPDomainClassification (fr)":85.5,"MTOPIntentClassification (fr)":53.98} -{"index":47,"Rank":34,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":63.13,"AmazonReviewsClassification (fr)":39.68,"MasakhaNEWSClassification (fra)":77.65,"MassiveIntentClassification (fr)":65.47,"MassiveScenarioClassification (fr)":68.76,"MTOPDomainClassification (fr)":81.2,"MTOPIntentClassification (fr)":46.01} 
-{"index":63,"Rank":35,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":62.14,"AmazonReviewsClassification (fr)":35.7,"MasakhaNEWSClassification (fra)":76.87,"MassiveIntentClassification (fr)":57.02,"MassiveScenarioClassification (fr)":65.2,"MTOPDomainClassification (fr)":84.61,"MTOPIntentClassification (fr)":53.41} -{"index":70,"Rank":36,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":61.64,"AmazonReviewsClassification (fr)":41.48,"MasakhaNEWSClassification (fra)":80.43,"MassiveIntentClassification (fr)":57.01,"MassiveScenarioClassification (fr)":63.6,"MTOPDomainClassification (fr)":79.6,"MTOPIntentClassification (fr)":47.73} -{"index":14,"Rank":37,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.3,"AmazonReviewsClassification (fr)":38.6,"MasakhaNEWSClassification (fra)":82.58,"MassiveIntentClassification (fr)":56.31,"MassiveScenarioClassification (fr)":59.5,"MTOPDomainClassification (fr)":80.79,"MTOPIntentClassification (fr)":50.01} -{"index":67,"Rank":38,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.02,"AmazonReviewsClassification (fr)":35.3,"MasakhaNEWSClassification (fra)":76.09,"MassiveIntentClassification (fr)":57.52,"MassiveScenarioClassification (fr)":64.52,"MTOPDomainClassification (fr)":78.63,"MTOPIntentClassification (fr)":54.05} -{"index":7,"Rank":39,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.98,"AmazonReviewsClassification (fr)":35.07,"MasakhaNEWSClassification (fra)":76.0,"MassiveIntentClassification (fr)":56.03,"MassiveScenarioClassification (fr)":59.3,"MTOPDomainClassification (fr)":75.7,"MTOPIntentClassification (fr)":63.76} -{"index":1,"Rank":40,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.84,"AmazonReviewsClassification (fr)":37.26,"MasakhaNEWSClassification (fra)":80.19,"MassiveIntentClassification (fr)":53.7,"MassiveScenarioClassification (fr)":62.46,"MTOPDomainClassification (fr)":79.79,"MTOPIntentClassification (fr)":45.62} -{"index":69,"Rank":41,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":58.08,"AmazonReviewsClassification (fr)":37.35,"MasakhaNEWSClassification (fra)":81.21,"MassiveIntentClassification (fr)":51.13,"MassiveScenarioClassification (fr)":59.92,"MTOPDomainClassification (fr)":75.03,"MTOPIntentClassification (fr)":43.85} -{"index":43,"Rank":42,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":57.72,"AmazonReviewsClassification (fr)":36.71,"MasakhaNEWSClassification (fra)":80.59,"MassiveIntentClassification (fr)":46.39,"MassiveScenarioClassification (fr)":53.86,"MTOPDomainClassification (fr)":74.8,"MTOPIntentClassification (fr)":53.97} -{"index":35,"Rank":43,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.63,"AmazonReviewsClassification (fr)":36.03,"MasakhaNEWSClassification (fra)":70.36,"MassiveIntentClassification (fr)":51.59,"MassiveScenarioClassification (fr)":61.28,"MTOPDomainClassification (fr)":77.1,"MTOPIntentClassification (fr)":43.44} 
-{"index":75,"Rank":44,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.78,"AmazonReviewsClassification (fr)":34.25,"MasakhaNEWSClassification (fra)":73.84,"MassiveIntentClassification (fr)":51.93,"MassiveScenarioClassification (fr)":58.31,"MTOPDomainClassification (fr)":71.83,"MTOPIntentClassification (fr)":44.53} -{"index":48,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.9,"AmazonReviewsClassification (fr)":35.12,"MasakhaNEWSClassification (fra)":80.83,"MassiveIntentClassification (fr)":43.21,"MassiveScenarioClassification (fr)":49.78,"MTOPDomainClassification (fr)":69.24,"MTOPIntentClassification (fr)":51.25} -{"index":5,"Rank":46,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":53.31,"AmazonReviewsClassification (fr)":31.12,"MasakhaNEWSClassification (fra)":65.9,"MassiveIntentClassification (fr)":46.13,"MassiveScenarioClassification (fr)":54.32,"MTOPDomainClassification (fr)":72.26,"MTOPIntentClassification (fr)":50.12} -{"index":58,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":52.14,"AmazonReviewsClassification (fr)":27.54,"MasakhaNEWSClassification (fra)":72.2,"MassiveIntentClassification (fr)":44.82,"MassiveScenarioClassification (fr)":53.76,"MTOPDomainClassification (fr)":75.59,"MTOPIntentClassification (fr)":38.94} -{"index":66,"Rank":48,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":50.9,"AmazonReviewsClassification (fr)":27.05,"MasakhaNEWSClassification (fra)":75.62,"MassiveIntentClassification (fr)":42.64,"MassiveScenarioClassification (fr)":49.92,"MTOPDomainClassification (fr)":72.97,"MTOPIntentClassification (fr)":37.18} -{"index":42,"Rank":49,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":48.37,"AmazonReviewsClassification (fr)":29.02,"MasakhaNEWSClassification (fra)":75.69,"MassiveIntentClassification (fr)":38.01,"MassiveScenarioClassification (fr)":43.63,"MTOPDomainClassification (fr)":64.49,"MTOPIntentClassification (fr)":39.4} -{"index":41,"Rank":50,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":46.1,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":64.0,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.61,"MTOPIntentClassification (fr)":37.84} -{"index":16,"Rank":51,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.1,"AmazonReviewsClassification (fr)":29.38,"MasakhaNEWSClassification (fra)":63.93,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.5,"MTOPDomainClassification (fr)":63.65,"MTOPIntentClassification (fr)":37.87} -{"index":18,"Rank":52,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":63.91,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.63,"MTOPIntentClassification (fr)":37.86} -{"index":17,"Rank":53,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million 
Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.35,"MasakhaNEWSClassification (fra)":63.89,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.7,"MTOPIntentClassification (fr)":37.85} -{"index":78,"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":31.21,"AmazonReviewsClassification (fr)":26.75,"MasakhaNEWSClassification (fra)":60.5,"MassiveIntentClassification (fr)":13.58,"MassiveScenarioClassification (fr)":23.21,"MTOPDomainClassification (fr)":43.83,"MTOPIntentClassification (fr)":19.38} -{"index":79,"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.71,"AmazonReviewsClassification (fr)":26.62,"MasakhaNEWSClassification (fra)":65.76,"MassiveIntentClassification (fr)":15.82,"MassiveScenarioClassification (fr)":23.92,"MTOPDomainClassification (fr)":36.77,"MTOPIntentClassification (fr)":15.37} -{"index":49,"Rank":56,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.38,"AmazonReviewsClassification (fr)":26.85,"MasakhaNEWSClassification (fra)":67.94,"MassiveIntentClassification (fr)":15.09,"MassiveScenarioClassification (fr)":21.67,"MTOPDomainClassification (fr)":34.99,"MTOPIntentClassification (fr)":15.76} -{"index":40,"Rank":57,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.21,"AmazonReviewsClassification (fr)":22.45,"MasakhaNEWSClassification (fra)":55.64,"MassiveIntentClassification (fr)":16.41,"MassiveScenarioClassification (fr)":22.72,"MTOPDomainClassification (fr)":24.27,"MTOPIntentClassification (fr)":9.79} -{"index":38,"Rank":58,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":24.91,"AmazonReviewsClassification (fr)":24.9,"MasakhaNEWSClassification (fra)":71.14,"MassiveIntentClassification (fr)":6.98,"MassiveScenarioClassification (fr)":11.41,"MTOPDomainClassification (fr)":25.55,"MTOPIntentClassification (fr)":9.49} -{"index":39,"Rank":59,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":23.28,"AmazonReviewsClassification (fr)":23.52,"MasakhaNEWSClassification (fra)":62.61,"MassiveIntentClassification (fr)":6.24,"MassiveScenarioClassification (fr)":10.98,"MTOPDomainClassification (fr)":27.74,"MTOPIntentClassification (fr)":8.61} -{"index":11,"Rank":61,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":37.53,"MassiveScenarioClassification (fr)":45.32,"MTOPDomainClassification (fr)":54.97,"MTOPIntentClassification (fr)":26.69} -{"index":12,"Rank":62,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":32.64,"MassiveScenarioClassification (fr)":40.66,"MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"index":13,"Rank":63,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification 
(fra)":"","MassiveIntentClassification (fr)":33.16,"MassiveScenarioClassification (fr)":40.92,"MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"index":20,"Rank":65,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":31.56,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":46.89,"MassiveScenarioClassification (fr)":56.99,"MTOPDomainClassification (fr)":79.8,"MTOPIntentClassification (fr)":38.96} -{"index":26,"Rank":67,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":26.39,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":43.44,"MassiveScenarioClassification (fr)":45.07,"MTOPDomainClassification (fr)":65.35,"MTOPIntentClassification (fr)":46.33} -{"index":27,"Rank":68,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":27.4,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":43.67,"MassiveScenarioClassification (fr)":45.92,"MTOPDomainClassification (fr)":63.13,"MTOPIntentClassification (fr)":44.34} -{"index":28,"Rank":69,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":37.84,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":"","MassiveScenarioClassification (fr)":"","MTOPDomainClassification (fr)":81.32,"MTOPIntentClassification (fr)":58.67} -{"index":31,"Rank":70,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":39.47,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":64.27,"MassiveScenarioClassification (fr)":69.76,"MTOPDomainClassification (fr)":86.22,"MTOPIntentClassification (fr)":59.43} -{"index":32,"Rank":71,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":39.29,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":64.58,"MassiveScenarioClassification (fr)":69.6,"MTOPDomainClassification (fr)":83.8,"MTOPIntentClassification (fr)":63.36} -{"index":33,"Rank":72,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":35.92,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":66.95,"MassiveScenarioClassification (fr)":72.91,"MTOPDomainClassification (fr)":90.98,"MTOPIntentClassification (fr)":69.12} -{"index":37,"Rank":73,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":34.91,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":58.55,"MassiveScenarioClassification (fr)":63.02,"MTOPDomainClassification (fr)":86.19,"MTOPIntentClassification (fr)":66.75} -{"index":46,"Rank":74,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":48.51,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":73.32,"MassiveScenarioClassification (fr)":77.07,"MTOPDomainClassification 
(fr)":89.97,"MTOPIntentClassification (fr)":76.72} -{"index":59,"Rank":75,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":74.05,"MassiveIntentClassification (fr)":"","MassiveScenarioClassification (fr)":"","MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"index":61,"Rank":77,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonReviewsClassification (fr)":23.31,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":38.41,"MassiveScenarioClassification (fr)":40.26,"MTOPDomainClassification (fr)":54.61,"MTOPIntentClassification (fr)":34.71} -{"index":64,"Rank":79,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonReviewsClassification (fr)":33.48,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":54.83,"MassiveScenarioClassification (fr)":64.06,"MTOPDomainClassification (fr)":82.48,"MTOPIntentClassification (fr)":46.39} -{"index":65,"Rank":80,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AmazonReviewsClassification (fr)":35.48,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":57.67,"MassiveScenarioClassification (fr)":66.72,"MTOPDomainClassification (fr)":85.05,"MTOPIntentClassification (fr)":51.07} -{"index":73,"Rank":81,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":30.84,"MassiveScenarioClassification (fr)":42.42,"MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"index":74,"Rank":82,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":33.45,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":58.8,"MassiveScenarioClassification (fr)":63.39,"MTOPDomainClassification (fr)":76.17,"MTOPIntentClassification (fr)":53.26} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null} +{"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size 
(Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} diff --git a/all_data_tasks/17/default.jsonl b/all_data_tasks/17/default.jsonl index 85b680c352c7c535eb6463fa57fcf3170f427e4e..d382b80b753e3b81b03f3e76bca723834e97af47 100644 --- a/all_data_tasks/17/default.jsonl +++ b/all_data_tasks/17/default.jsonl @@ -1,60 +1,57 @@ -{"index":10,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.48,"AlloProfClusteringP2P":71.2,"AlloProfClusteringS2S":59.64,"HALClusteringS2S":28.19,"MLSUMClusteringP2P (fr)":47.75,"MLSUMClusteringS2S (fr)":47.46,"MasakhaNEWSClusteringP2P (fra)":73.86,"MasakhaNEWSClusteringS2S (fra)":67.24} -{"index":9,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, 
fp32)":28.36,"Average":55.56,"AlloProfClusteringP2P":76.06,"AlloProfClusteringS2S":64.52,"HALClusteringS2S":30.83,"MLSUMClusteringP2P (fr)":50.03,"MLSUMClusteringS2S (fr)":43.7,"MasakhaNEWSClusteringP2P (fra)":60.19,"MasakhaNEWSClusteringS2S (fra)":63.62} -{"index":8,"Rank":3,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.01,"AlloProfClusteringP2P":70.55,"AlloProfClusteringS2S":55.42,"HALClusteringS2S":28.3,"MLSUMClusteringP2P (fr)":45.27,"MLSUMClusteringS2S (fr)":42.77,"MasakhaNEWSClusteringP2P (fra)":71.04,"MasakhaNEWSClusteringS2S (fra)":71.71} -{"index":22,"Rank":4,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.26,"AlloProfClusteringP2P":65.3,"AlloProfClusteringS2S":55.37,"HALClusteringS2S":26.27,"MLSUMClusteringP2P (fr)":42.6,"MLSUMClusteringS2S (fr)":42.92,"MasakhaNEWSClusteringP2P (fra)":71.29,"MasakhaNEWSClusteringS2S (fra)":55.09} -{"index":81,"Rank":5,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.67,"AlloProfClusteringP2P":64.83,"AlloProfClusteringS2S":53.52,"HALClusteringS2S":26.18,"MLSUMClusteringP2P (fr)":44.59,"MLSUMClusteringS2S (fr)":41.67,"MasakhaNEWSClusteringP2P (fra)":68.35,"MasakhaNEWSClusteringS2S (fra)":48.58} -{"index":21,"Rank":6,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.45,"AlloProfClusteringP2P":64.71,"AlloProfClusteringS2S":45.57,"HALClusteringS2S":25.37,"MLSUMClusteringP2P (fr)":44.23,"MLSUMClusteringS2S (fr)":44.58,"MasakhaNEWSClusteringP2P (fra)":61.58,"MasakhaNEWSClusteringS2S (fra)":46.1} -{"index":4,"Rank":7,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.57,"AlloProfClusteringP2P":65.37,"AlloProfClusteringS2S":47.03,"HALClusteringS2S":27.67,"MLSUMClusteringP2P (fr)":45.99,"MLSUMClusteringS2S (fr)":45.57,"MasakhaNEWSClusteringP2P (fra)":44.53,"MasakhaNEWSClusteringS2S (fra)":49.8} -{"index":24,"Rank":8,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.25,"AlloProfClusteringP2P":62.17,"AlloProfClusteringS2S":45.12,"HALClusteringS2S":23.56,"MLSUMClusteringP2P (fr)":43.3,"MLSUMClusteringS2S (fr)":42.77,"MasakhaNEWSClusteringP2P (fra)":52.88,"MasakhaNEWSClusteringS2S (fra)":53.93} -{"index":2,"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.46,"AlloProfClusteringP2P":61.63,"AlloProfClusteringS2S":50.67,"HALClusteringS2S":27.44,"MLSUMClusteringP2P (fr)":45.23,"MLSUMClusteringS2S (fr)":41.48,"MasakhaNEWSClusteringP2P (fra)":56.59,"MasakhaNEWSClusteringS2S (fra)":35.18} -{"index":0,"Rank":10,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.74,"AlloProfClusteringP2P":62.01,"AlloProfClusteringS2S":49.2,"HALClusteringS2S":26.17,"MLSUMClusteringP2P (fr)":45.28,"MLSUMClusteringS2S (fr)":42.74,"MasakhaNEWSClusteringP2P (fra)":48.13,"MasakhaNEWSClusteringS2S (fra)":39.62} -{"index":3,"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.23,"AlloProfClusteringP2P":62.5,"AlloProfClusteringS2S":44.28,"HALClusteringS2S":26.36,"MLSUMClusteringP2P (fr)":44.03,"MLSUMClusteringS2S (fr)":42.95,"MasakhaNEWSClusteringP2P (fra)":50.68,"MasakhaNEWSClusteringS2S (fra)":38.79} 
-{"index":72,"Rank":12,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.25,"AlloProfClusteringP2P":60.98,"AlloProfClusteringS2S":43.5,"HALClusteringS2S":21.4,"MLSUMClusteringP2P (fr)":42.24,"MLSUMClusteringS2S (fr)":35.25,"MasakhaNEWSClusteringP2P (fra)":61.15,"MasakhaNEWSClusteringS2S (fra)":38.24} -{"index":36,"Rank":13,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":43.17,"AlloProfClusteringP2P":62.69,"AlloProfClusteringS2S":42.06,"HALClusteringS2S":23.9,"MLSUMClusteringP2P (fr)":42.04,"MLSUMClusteringS2S (fr)":32.29,"MasakhaNEWSClusteringP2P (fra)":54.51,"MasakhaNEWSClusteringS2S (fra)":44.73} -{"index":44,"Rank":14,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.66,"AlloProfClusteringP2P":62.09,"AlloProfClusteringS2S":32.98,"HALClusteringS2S":22.48,"MLSUMClusteringP2P (fr)":43.48,"MLSUMClusteringS2S (fr)":38.53,"MasakhaNEWSClusteringP2P (fra)":47.91,"MasakhaNEWSClusteringS2S (fra)":51.16} -{"index":76,"Rank":15,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.0,"AlloProfClusteringP2P":56.9,"AlloProfClusteringS2S":37.84,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.9,"MLSUMClusteringS2S (fr)":35.5,"MasakhaNEWSClusteringP2P (fra)":60.57,"MasakhaNEWSClusteringS2S (fra)":40.31} -{"index":1,"Rank":16,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.88,"AlloProfClusteringP2P":57.96,"AlloProfClusteringS2S":41.65,"HALClusteringS2S":24.84,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":38.77,"MasakhaNEWSClusteringP2P (fra)":48.54,"MasakhaNEWSClusteringS2S (fra)":36.33} -{"index":54,"Rank":17,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.82,"AlloProfClusteringP2P":57.43,"AlloProfClusteringS2S":39.09,"HALClusteringS2S":25.77,"MLSUMClusteringP2P (fr)":42.03,"MLSUMClusteringS2S (fr)":41.83,"MasakhaNEWSClusteringP2P (fra)":49.68,"MasakhaNEWSClusteringS2S (fra)":36.91} -{"index":30,"Rank":18,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":41.7,"AlloProfClusteringP2P":64.12,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":23.4,"MLSUMClusteringP2P (fr)":42.94,"MLSUMClusteringS2S (fr)":33.91,"MasakhaNEWSClusteringP2P (fra)":53.94,"MasakhaNEWSClusteringS2S (fra)":41.05} -{"index":71,"Rank":19,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":41.6,"AlloProfClusteringP2P":60.37,"AlloProfClusteringS2S":40.76,"HALClusteringS2S":20.28,"MLSUMClusteringP2P (fr)":41.61,"MLSUMClusteringS2S (fr)":33.6,"MasakhaNEWSClusteringP2P (fra)":62.82,"MasakhaNEWSClusteringS2S (fra)":31.74} -{"index":51,"Rank":20,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.33,"AlloProfClusteringP2P":59.89,"AlloProfClusteringS2S":38.46,"HALClusteringS2S":25.68,"MLSUMClusteringP2P (fr)":44.01,"MLSUMClusteringS2S (fr)":36.92,"MasakhaNEWSClusteringP2P (fra)":47.22,"MasakhaNEWSClusteringS2S (fra)":37.16} -{"index":7,"Rank":21,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":41.21,"AlloProfClusteringP2P":64.0,"AlloProfClusteringS2S":29.93,"HALClusteringS2S":20.82,"MLSUMClusteringP2P (fr)":45.26,"MLSUMClusteringS2S (fr)":44.95,"MasakhaNEWSClusteringP2P (fra)":51.34,"MasakhaNEWSClusteringS2S (fra)":32.2} -{"index":43,"Rank":22,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":41.16,"AlloProfClusteringP2P":61.06,"AlloProfClusteringS2S":28.12,"HALClusteringS2S":19.69,"MLSUMClusteringP2P (fr)":45.59,"MLSUMClusteringS2S (fr)":32.0,"MasakhaNEWSClusteringP2P (fra)":52.47,"MasakhaNEWSClusteringS2S (fra)":49.2} -{"index":15,"Rank":23,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.7,"AlloProfClusteringP2P":63.53,"AlloProfClusteringS2S":36.18,"HALClusteringS2S":19.9,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":34.75,"MasakhaNEWSClusteringP2P (fra)":53.18,"MasakhaNEWSClusteringS2S (fra)":32.31} -{"index":70,"Rank":24,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.6,"AlloProfClusteringP2P":61.82,"AlloProfClusteringS2S":39.78,"HALClusteringS2S":18.73,"MLSUMClusteringP2P (fr)":42.07,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":58.6,"MasakhaNEWSClusteringS2S (fra)":31.33} -{"index":42,"Rank":25,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.43,"AlloProfClusteringP2P":60.66,"AlloProfClusteringS2S":35.05,"HALClusteringS2S":20.9,"MLSUMClusteringP2P (fr)":43.5,"MLSUMClusteringS2S (fr)":30.99,"MasakhaNEWSClusteringP2P (fra)":49.71,"MasakhaNEWSClusteringS2S (fra)":42.23} -{"index":53,"Rank":26,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.4,"AlloProfClusteringP2P":56.73,"AlloProfClusteringS2S":37.62,"HALClusteringS2S":25.76,"MLSUMClusteringP2P (fr)":41.82,"MLSUMClusteringS2S (fr)":41.83,"MasakhaNEWSClusteringP2P (fra)":42.63,"MasakhaNEWSClusteringS2S (fra)":36.4} -{"index":69,"Rank":27,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.07,"AlloProfClusteringP2P":58.44,"AlloProfClusteringS2S":35.93,"HALClusteringS2S":17.72,"MLSUMClusteringP2P (fr)":40.77,"MLSUMClusteringS2S (fr)":30.06,"MasakhaNEWSClusteringP2P (fra)":61.9,"MasakhaNEWSClusteringS2S (fra)":35.64} -{"index":80,"Rank":28,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.86,"AlloProfClusteringP2P":56.89,"AlloProfClusteringS2S":38.2,"HALClusteringS2S":24.5,"MLSUMClusteringP2P (fr)":41.79,"MLSUMClusteringS2S (fr)":41.55,"MasakhaNEWSClusteringP2P (fra)":49.18,"MasakhaNEWSClusteringS2S (fra)":26.94} -{"index":52,"Rank":29,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.81,"AlloProfClusteringP2P":59.15,"AlloProfClusteringS2S":36.45,"HALClusteringS2S":24.97,"MLSUMClusteringP2P (fr)":42.49,"MLSUMClusteringS2S (fr)":34.45,"MasakhaNEWSClusteringP2P (fra)":47.58,"MasakhaNEWSClusteringS2S (fra)":33.58} -{"index":29,"Rank":30,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.65,"AlloProfClusteringP2P":64.17,"AlloProfClusteringS2S":38.17,"HALClusteringS2S":24.09,"MLSUMClusteringP2P (fr)":43.8,"MLSUMClusteringS2S 
(fr)":37.75,"MasakhaNEWSClusteringP2P (fra)":40.8,"MasakhaNEWSClusteringS2S (fra)":28.8} -{"index":68,"Rank":31,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.11,"AlloProfClusteringP2P":54.49,"AlloProfClusteringS2S":44.79,"HALClusteringS2S":23.97,"MLSUMClusteringP2P (fr)":40.55,"MLSUMClusteringS2S (fr)":37.53,"MasakhaNEWSClusteringP2P (fra)":41.57,"MasakhaNEWSClusteringS2S (fra)":30.88} -{"index":47,"Rank":32,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.93,"AlloProfClusteringP2P":60.89,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.2,"MLSUMClusteringS2S (fr)":37.61,"MasakhaNEWSClusteringP2P (fra)":40.12,"MasakhaNEWSClusteringS2S (fra)":39.22} -{"index":14,"Rank":33,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.91,"AlloProfClusteringP2P":61.96,"AlloProfClusteringS2S":31.36,"HALClusteringS2S":17.31,"MLSUMClusteringP2P (fr)":42.8,"MLSUMClusteringS2S (fr)":32.72,"MasakhaNEWSClusteringP2P (fra)":56.81,"MasakhaNEWSClusteringS2S (fra)":29.41} -{"index":45,"Rank":34,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":38.7,"AlloProfClusteringP2P":62.99,"AlloProfClusteringS2S":32.26,"HALClusteringS2S":22.44,"MLSUMClusteringP2P (fr)":44.04,"MLSUMClusteringS2S (fr)":37.65,"MasakhaNEWSClusteringP2P (fra)":40.94,"MasakhaNEWSClusteringS2S (fra)":30.56} -{"index":63,"Rank":35,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":38.7,"AlloProfClusteringP2P":55.95,"AlloProfClusteringS2S":35.39,"HALClusteringS2S":18.2,"MLSUMClusteringP2P (fr)":40.17,"MLSUMClusteringS2S (fr)":34.65,"MasakhaNEWSClusteringP2P (fra)":53.76,"MasakhaNEWSClusteringS2S (fra)":32.76} -{"index":23,"Rank":36,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.61,"AlloProfClusteringP2P":55.52,"AlloProfClusteringS2S":35.8,"HALClusteringS2S":23.14,"MLSUMClusteringP2P (fr)":40.31,"MLSUMClusteringS2S (fr)":40.05,"MasakhaNEWSClusteringP2P (fra)":45.03,"MasakhaNEWSClusteringS2S (fra)":30.39} -{"index":67,"Rank":37,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.35,"AlloProfClusteringP2P":56.06,"AlloProfClusteringS2S":42.16,"HALClusteringS2S":23.21,"MLSUMClusteringP2P (fr)":39.97,"MLSUMClusteringS2S (fr)":36.55,"MasakhaNEWSClusteringP2P (fra)":36.58,"MasakhaNEWSClusteringS2S (fra)":33.9} -{"index":57,"Rank":38,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":38.32,"AlloProfClusteringP2P":54.78,"AlloProfClusteringS2S":31.6,"HALClusteringS2S":20.62,"MLSUMClusteringP2P (fr)":42.09,"MLSUMClusteringS2S (fr)":34.84,"MasakhaNEWSClusteringP2P (fra)":46.16,"MasakhaNEWSClusteringS2S (fra)":38.13} -{"index":50,"Rank":39,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.9,"AlloProfClusteringP2P":56.73,"AlloProfClusteringS2S":38.2,"HALClusteringS2S":24.13,"MLSUMClusteringP2P (fr)":42.12,"MLSUMClusteringS2S (fr)":36.69,"MasakhaNEWSClusteringP2P (fra)":34.61,"MasakhaNEWSClusteringS2S (fra)":32.81} 
-{"index":77,"Rank":40,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.35,"AlloProfClusteringP2P":54.21,"AlloProfClusteringS2S":37.95,"HALClusteringS2S":18.94,"MLSUMClusteringP2P (fr)":41.02,"MLSUMClusteringS2S (fr)":37.97,"MasakhaNEWSClusteringP2P (fra)":24.09,"MasakhaNEWSClusteringS2S (fra)":40.24} -{"index":35,"Rank":41,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":35.44,"AlloProfClusteringP2P":59.09,"AlloProfClusteringS2S":38.92,"HALClusteringS2S":20.22,"MLSUMClusteringP2P (fr)":35.98,"MLSUMClusteringS2S (fr)":27.05,"MasakhaNEWSClusteringP2P (fra)":36.03,"MasakhaNEWSClusteringS2S (fra)":30.77} -{"index":48,"Rank":42,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.04,"AlloProfClusteringP2P":62.22,"AlloProfClusteringS2S":27.06,"HALClusteringS2S":13.86,"MLSUMClusteringP2P (fr)":44.11,"MLSUMClusteringS2S (fr)":30.47,"MasakhaNEWSClusteringP2P (fra)":40.2,"MasakhaNEWSClusteringS2S (fra)":27.35} -{"index":59,"Rank":43,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":34.73,"AlloProfClusteringP2P":51.83,"AlloProfClusteringS2S":32.07,"HALClusteringS2S":18.84,"MLSUMClusteringP2P (fr)":36.74,"MLSUMClusteringS2S (fr)":28.12,"MasakhaNEWSClusteringP2P (fra)":34.92,"MasakhaNEWSClusteringS2S (fra)":40.58} -{"index":17,"Rank":44,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":34.09,"AlloProfClusteringP2P":53.16,"AlloProfClusteringS2S":43.43,"HALClusteringS2S":20.26,"MLSUMClusteringP2P (fr)":41.22,"MLSUMClusteringS2S (fr)":31.88,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"index":18,"Rank":45,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.96,"AlloProfClusteringP2P":53.49,"AlloProfClusteringS2S":43.1,"HALClusteringS2S":19.78,"MLSUMClusteringP2P (fr)":40.73,"MLSUMClusteringS2S (fr)":31.94,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"index":16,"Rank":46,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.94,"AlloProfClusteringP2P":53.22,"AlloProfClusteringS2S":42.92,"HALClusteringS2S":19.94,"MLSUMClusteringP2P (fr)":40.96,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"index":41,"Rank":47,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":33.82,"AlloProfClusteringP2P":51.5,"AlloProfClusteringS2S":43.06,"HALClusteringS2S":20.81,"MLSUMClusteringP2P (fr)":40.9,"MLSUMClusteringS2S (fr)":31.8,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"index":58,"Rank":48,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":33.75,"AlloProfClusteringP2P":46.03,"AlloProfClusteringS2S":31.83,"HALClusteringS2S":19.58,"MLSUMClusteringP2P (fr)":34.35,"MLSUMClusteringS2S (fr)":29.3,"MasakhaNEWSClusteringP2P (fra)":42.72,"MasakhaNEWSClusteringS2S (fra)":32.47} -{"index":75,"Rank":49,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":33.66,"AlloProfClusteringP2P":49.11,"AlloProfClusteringS2S":32.72,"HALClusteringS2S":16.19,"MLSUMClusteringP2P (fr)":36.19,"MLSUMClusteringS2S (fr)":30.39,"MasakhaNEWSClusteringP2P (fra)":38.51,"MasakhaNEWSClusteringS2S (fra)":32.51} -{"index":66,"Rank":50,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":32.84,"AlloProfClusteringP2P":49.13,"AlloProfClusteringS2S":26.16,"HALClusteringS2S":12.49,"MLSUMClusteringP2P (fr)":35.15,"MLSUMClusteringS2S (fr)":25.95,"MasakhaNEWSClusteringP2P (fra)":53.73,"MasakhaNEWSClusteringS2S (fra)":27.27} -{"index":5,"Rank":51,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":29.92,"AlloProfClusteringP2P":48.45,"AlloProfClusteringS2S":25.81,"HALClusteringS2S":11.52,"MLSUMClusteringP2P (fr)":34.53,"MLSUMClusteringS2S (fr)":27.35,"MasakhaNEWSClusteringP2P (fra)":32.04,"MasakhaNEWSClusteringS2S (fra)":29.77} -{"index":49,"Rank":52,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.03,"AlloProfClusteringP2P":53.57,"AlloProfClusteringS2S":22.13,"HALClusteringS2S":7.68,"MLSUMClusteringP2P (fr)":36.43,"MLSUMClusteringS2S (fr)":25.26,"MasakhaNEWSClusteringP2P (fra)":37.57,"MasakhaNEWSClusteringS2S (fra)":20.58} -{"index":79,"Rank":53,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":28.62,"AlloProfClusteringP2P":56.54,"AlloProfClusteringS2S":21.18,"HALClusteringS2S":5.94,"MLSUMClusteringP2P (fr)":42.67,"MLSUMClusteringS2S (fr)":18.5,"MasakhaNEWSClusteringP2P (fra)":34.02,"MasakhaNEWSClusteringS2S (fra)":21.52} -{"index":78,"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":28.42,"AlloProfClusteringP2P":52.24,"AlloProfClusteringS2S":20.37,"HALClusteringS2S":8.68,"MLSUMClusteringP2P (fr)":40.44,"MLSUMClusteringS2S (fr)":24.14,"MasakhaNEWSClusteringP2P (fra)":29.29,"MasakhaNEWSClusteringS2S (fra)":23.76} -{"index":38,"Rank":55,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":27.18,"AlloProfClusteringP2P":52.86,"AlloProfClusteringS2S":14.46,"HALClusteringS2S":3.85,"MLSUMClusteringP2P (fr)":39.06,"MLSUMClusteringS2S (fr)":17.13,"MasakhaNEWSClusteringP2P (fra)":41.61,"MasakhaNEWSClusteringS2S (fra)":21.26} -{"index":40,"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.11,"AlloProfClusteringP2P":40.85,"AlloProfClusteringS2S":21.76,"HALClusteringS2S":5.26,"MLSUMClusteringP2P (fr)":38.09,"MLSUMClusteringS2S (fr)":18.71,"MasakhaNEWSClusteringP2P (fra)":26.43,"MasakhaNEWSClusteringS2S (fra)":24.68} -{"index":39,"Rank":57,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":22.45,"AlloProfClusteringP2P":43.2,"AlloProfClusteringS2S":12.94,"HALClusteringS2S":1.8,"MLSUMClusteringP2P (fr)":33.22,"MLSUMClusteringS2S (fr)":14.9,"MasakhaNEWSClusteringP2P (fra)":28.49,"MasakhaNEWSClusteringS2S (fra)":22.58} -{"index":34,"Rank":71,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloProfClusteringP2P":76.06,"AlloProfClusteringS2S":"","HALClusteringS2S":30.83,"MLSUMClusteringP2P (fr)":50.03,"MLSUMClusteringS2S (fr)":"","MasakhaNEWSClusteringP2P (fra)":60.19,"MasakhaNEWSClusteringS2S 
(fra)":""} -{"index":55,"Rank":74,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloProfClusteringP2P":70.55,"AlloProfClusteringS2S":"","HALClusteringS2S":28.3,"MLSUMClusteringP2P (fr)":45.27,"MLSUMClusteringS2S (fr)":"","MasakhaNEWSClusteringP2P (fra)":71.04,"MasakhaNEWSClusteringS2S (fra)":""} -{"index":56,"Rank":75,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloProfClusteringP2P":76.06,"AlloProfClusteringS2S":"","HALClusteringS2S":30.83,"MLSUMClusteringP2P (fr)":50.03,"MLSUMClusteringS2S (fr)":"","MasakhaNEWSClusteringP2P (fra)":60.19,"MasakhaNEWSClusteringS2S (fra)":""} +{"Rank":1,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.18,"AlloProfClusteringP2P":64.83,"AlloProfClusteringS2S":53.52,"HALClusteringS2S":26.18} +{"Rank":2,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.69,"AlloProfClusteringP2P":65.37,"AlloProfClusteringS2S":47.03,"HALClusteringS2S":27.67} +{"Rank":3,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.58,"AlloProfClusteringP2P":61.63,"AlloProfClusteringS2S":50.67,"HALClusteringS2S":27.44} +{"Rank":4,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.79,"AlloProfClusteringP2P":62.01,"AlloProfClusteringS2S":49.2,"HALClusteringS2S":26.17} +{"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.38,"AlloProfClusteringP2P":62.5,"AlloProfClusteringS2S":44.28,"HALClusteringS2S":26.36} +{"Rank":6,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":42.88,"AlloProfClusteringP2P":62.69,"AlloProfClusteringS2S":42.06,"HALClusteringS2S":23.9} +{"Rank":7,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":41.96,"AlloProfClusteringP2P":60.98,"AlloProfClusteringS2S":43.5,"HALClusteringS2S":21.4} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.48,"AlloProfClusteringP2P":57.96,"AlloProfClusteringS2S":41.65,"HALClusteringS2S":24.84} +{"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":41.08,"AlloProfClusteringP2P":54.49,"AlloProfClusteringS2S":44.79,"HALClusteringS2S":23.97} +{"Rank":10,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":40.48,"AlloProfClusteringP2P":56.06,"AlloProfClusteringS2S":42.16,"HALClusteringS2S":23.21} +{"Rank":11,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":40.47,"AlloProfClusteringP2P":60.37,"AlloProfClusteringS2S":40.76,"HALClusteringS2S":20.28} +{"Rank":12,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.11,"AlloProfClusteringP2P":61.82,"AlloProfClusteringS2S":39.78,"HALClusteringS2S":18.73} +{"Rank":13,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, 
fp32)":4.77,"Average":40.01,"AlloProfClusteringP2P":64.12,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":23.4} +{"Rank":14,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.87,"AlloProfClusteringP2P":63.53,"AlloProfClusteringS2S":36.18,"HALClusteringS2S":19.9} +{"Rank":15,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":39.41,"AlloProfClusteringP2P":59.09,"AlloProfClusteringS2S":38.92,"HALClusteringS2S":20.22} +{"Rank":16,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":39.23,"AlloProfClusteringP2P":62.99,"AlloProfClusteringS2S":32.26,"HALClusteringS2S":22.44} +{"Rank":17,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.18,"AlloProfClusteringP2P":62.09,"AlloProfClusteringS2S":32.98,"HALClusteringS2S":22.48} +{"Rank":18,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":38.95,"AlloProfClusteringP2P":53.16,"AlloProfClusteringS2S":43.43,"HALClusteringS2S":20.26} +{"Rank":19,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":38.87,"AlloProfClusteringP2P":60.66,"AlloProfClusteringS2S":35.05,"HALClusteringS2S":20.9} +{"Rank":20,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":38.79,"AlloProfClusteringP2P":53.49,"AlloProfClusteringS2S":43.1,"HALClusteringS2S":19.78} +{"Rank":21,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":38.69,"AlloProfClusteringP2P":53.22,"AlloProfClusteringS2S":42.92,"HALClusteringS2S":19.94} +{"Rank":22,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":38.46,"AlloProfClusteringP2P":51.5,"AlloProfClusteringS2S":43.06,"HALClusteringS2S":20.81} +{"Rank":23,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.9,"AlloProfClusteringP2P":56.9,"AlloProfClusteringS2S":37.84,"HALClusteringS2S":18.95} +{"Rank":24,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.45,"AlloProfClusteringP2P":60.89,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":18.95} +{"Rank":25,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.36,"AlloProfClusteringP2P":58.44,"AlloProfClusteringS2S":35.93,"HALClusteringS2S":17.72} +{"Rank":26,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.03,"AlloProfClusteringP2P":54.21,"AlloProfClusteringS2S":37.95,"HALClusteringS2S":18.94} +{"Rank":27,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.88,"AlloProfClusteringP2P":61.96,"AlloProfClusteringS2S":31.36,"HALClusteringS2S":17.31} +{"Rank":28,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":36.51,"AlloProfClusteringP2P":55.95,"AlloProfClusteringS2S":35.39,"HALClusteringS2S":18.2} +{"Rank":29,"Model":"e5-mistral-7b-instruct<\/a>","Model Size 
(Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":36.29,"AlloProfClusteringP2P":61.06,"AlloProfClusteringS2S":28.12,"HALClusteringS2S":19.69} +{"Rank":30,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":35.67,"AlloProfClusteringP2P":54.78,"AlloProfClusteringS2S":31.6,"HALClusteringS2S":20.62} +{"Rank":31,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.38,"AlloProfClusteringP2P":62.22,"AlloProfClusteringS2S":27.06,"HALClusteringS2S":13.86} +{"Rank":32,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":34.25,"AlloProfClusteringP2P":51.83,"AlloProfClusteringS2S":32.07,"HALClusteringS2S":18.84} +{"Rank":33,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.67,"AlloProfClusteringP2P":49.11,"AlloProfClusteringS2S":32.72,"HALClusteringS2S":16.19} +{"Rank":34,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":32.48,"AlloProfClusteringP2P":46.03,"AlloProfClusteringS2S":31.83,"HALClusteringS2S":19.58} +{"Rank":35,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.26,"AlloProfClusteringP2P":49.13,"AlloProfClusteringS2S":26.16,"HALClusteringS2S":12.49} +{"Rank":36,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":28.59,"AlloProfClusteringP2P":48.45,"AlloProfClusteringS2S":25.81,"HALClusteringS2S":11.52} +{"Rank":37,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":27.89,"AlloProfClusteringP2P":56.54,"AlloProfClusteringS2S":21.18,"HALClusteringS2S":5.94} +{"Rank":38,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":27.79,"AlloProfClusteringP2P":53.57,"AlloProfClusteringS2S":22.13,"HALClusteringS2S":7.68} +{"Rank":39,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":27.1,"AlloProfClusteringP2P":52.24,"AlloProfClusteringS2S":20.37,"HALClusteringS2S":8.68} +{"Rank":40,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":23.72,"AlloProfClusteringP2P":52.86,"AlloProfClusteringS2S":14.46,"HALClusteringS2S":3.85} +{"Rank":41,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":22.62,"AlloProfClusteringP2P":40.85,"AlloProfClusteringS2S":21.76,"HALClusteringS2S":5.26} +{"Rank":42,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":19.31,"AlloProfClusteringP2P":43.2,"AlloProfClusteringS2S":12.94,"HALClusteringS2S":1.8} +{"Rank":43,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":44,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":45,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, 
fp32)":0.67,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":46,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":47,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":48,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":49,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null} diff --git a/all_data_tasks/18/default.jsonl b/all_data_tasks/18/default.jsonl index fd34c2b730d79240b61bfbbc7898cd0e69ec062a..c2c67a443c29a551d3ed5f619dbb294c2a64b2a3 100644 --- a/all_data_tasks/18/default.jsonl +++ b/all_data_tasks/18/default.jsonl @@ -1,61 +1,57 @@ -{"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":90.43,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":80.86} -{"index":34,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.43,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":80.86} -{"index":56,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.43,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":80.86} 
-{"index":55,"Rank":4,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.88,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":73.77} -{"index":8,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.88,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":73.77} -{"index":10,"Rank":6,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.07,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":70.14} -{"index":5,"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":81.65,"OpusparcusPC (fr)":93.77,"PawsXPairClassification (fr)":69.53} -{"index":7,"Rank":8,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.54,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":61.07} -{"index":52,"Rank":9,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.23,"OpusparcusPC (fr)":93.48,"PawsXPairClassification (fr)":66.98} -{"index":54,"Rank":10,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.15,"OpusparcusPC (fr)":93.37,"PawsXPairClassification (fr)":66.92} -{"index":22,"Rank":11,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.64,"OpusparcusPC (fr)":94.77,"PawsXPairClassification (fr)":64.51} -{"index":53,"Rank":12,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.57,"OpusparcusPC (fr)":93.72,"PawsXPairClassification (fr)":65.42} -{"index":72,"Rank":13,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.96,"OpusparcusPC (fr)":93.94,"PawsXPairClassification (fr)":63.98} -{"index":75,"Rank":14,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.81,"OpusparcusPC (fr)":92.04,"PawsXPairClassification (fr)":65.57} -{"index":4,"Rank":15,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.66,"OpusparcusPC (fr)":93.68,"PawsXPairClassification (fr)":63.64} -{"index":23,"Rank":16,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.3,"OpusparcusPC (fr)":93.15,"PawsXPairClassification (fr)":63.44} -{"index":15,"Rank":17,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.67,"OpusparcusPC (fr)":94.08,"PawsXPairClassification (fr)":61.26} -{"index":71,"Rank":18,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.5,"OpusparcusPC (fr)":92.48,"PawsXPairClassification (fr)":62.52} -{"index":0,"Rank":19,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.32,"OpusparcusPC (fr)":92.61,"PawsXPairClassification (fr)":62.02} -{"index":3,"Rank":20,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.3,"OpusparcusPC (fr)":93.06,"PawsXPairClassification (fr)":61.54} 
-{"index":30,"Rank":21,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":77.28,"OpusparcusPC (fr)":91.42,"PawsXPairClassification (fr)":63.13} -{"index":81,"Rank":22,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.14,"OpusparcusPC (fr)":94.12,"PawsXPairClassification (fr)":60.16} -{"index":36,"Rank":23,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":77.11,"OpusparcusPC (fr)":94.63,"PawsXPairClassification (fr)":59.59} -{"index":29,"Rank":24,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.1,"OpusparcusPC (fr)":94.02,"PawsXPairClassification (fr)":60.19} -{"index":50,"Rank":25,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.1,"OpusparcusPC (fr)":93.34,"PawsXPairClassification (fr)":60.85} -{"index":21,"Rank":26,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.01,"OpusparcusPC (fr)":93.64,"PawsXPairClassification (fr)":60.38} -{"index":80,"Rank":27,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.97,"OpusparcusPC (fr)":93.18,"PawsXPairClassification (fr)":60.76} -{"index":2,"Rank":28,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.85,"OpusparcusPC (fr)":92.87,"PawsXPairClassification (fr)":60.83} -{"index":45,"Rank":29,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":76.19,"OpusparcusPC (fr)":93.89,"PawsXPairClassification (fr)":58.5} -{"index":43,"Rank":30,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":76.08,"OpusparcusPC (fr)":88.5,"PawsXPairClassification (fr)":63.65} -{"index":68,"Rank":31,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":75.8,"OpusparcusPC (fr)":93.45,"PawsXPairClassification (fr)":58.14} -{"index":24,"Rank":32,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.61,"OpusparcusPC (fr)":92.6,"PawsXPairClassification (fr)":58.63} -{"index":70,"Rank":33,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":75.39,"OpusparcusPC (fr)":91.19,"PawsXPairClassification (fr)":59.59} -{"index":44,"Rank":34,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.82,"OpusparcusPC (fr)":92.72,"PawsXPairClassification (fr)":56.93} -{"index":35,"Rank":35,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.74,"OpusparcusPC (fr)":92.05,"PawsXPairClassification (fr)":57.44} -{"index":67,"Rank":36,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.47,"OpusparcusPC (fr)":92.01,"PawsXPairClassification (fr)":56.94} -{"index":1,"Rank":37,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.36,"OpusparcusPC (fr)":89.76,"PawsXPairClassification (fr)":58.96} 
-{"index":57,"Rank":38,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":74.3,"OpusparcusPC (fr)":93.96,"PawsXPairClassification (fr)":54.63} -{"index":14,"Rank":39,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"OpusparcusPC (fr)":90.92,"PawsXPairClassification (fr)":57.32} -{"index":47,"Rank":40,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.1,"OpusparcusPC (fr)":92.52,"PawsXPairClassification (fr)":55.68} -{"index":48,"Rank":41,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.76,"OpusparcusPC (fr)":85.54,"PawsXPairClassification (fr)":61.99} -{"index":77,"Rank":42,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.5,"OpusparcusPC (fr)":93.38,"PawsXPairClassification (fr)":53.62} -{"index":66,"Rank":43,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":72.72,"OpusparcusPC (fr)":88.07,"PawsXPairClassification (fr)":57.36} -{"index":69,"Rank":44,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.38,"OpusparcusPC (fr)":89.4,"PawsXPairClassification (fr)":55.35} -{"index":76,"Rank":45,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.92,"OpusparcusPC (fr)":91.46,"PawsXPairClassification (fr)":52.39} -{"index":63,"Rank":46,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":71.57,"OpusparcusPC (fr)":92.07,"PawsXPairClassification (fr)":51.08} -{"index":59,"Rank":47,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":70.96,"OpusparcusPC (fr)":86.53,"PawsXPairClassification (fr)":55.4} -{"index":49,"Rank":48,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.9,"OpusparcusPC (fr)":82.1,"PawsXPairClassification (fr)":59.69} -{"index":42,"Rank":49,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":70.32,"OpusparcusPC (fr)":87.43,"PawsXPairClassification (fr)":53.22} -{"index":16,"Rank":50,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.1,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.4} -{"index":18,"Rank":51,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.09,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.39} -{"index":17,"Rank":52,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.08,"OpusparcusPC (fr)":86.78,"PawsXPairClassification (fr)":53.38} -{"index":41,"Rank":53,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":70.08,"OpusparcusPC (fr)":86.77,"PawsXPairClassification (fr)":53.39} -{"index":79,"Rank":54,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":68.56,"OpusparcusPC 
(fr)":83.73,"PawsXPairClassification (fr)":53.38} -{"index":78,"Rank":55,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":68.4,"OpusparcusPC (fr)":85.45,"PawsXPairClassification (fr)":51.35} -{"index":39,"Rank":56,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.39,"OpusparcusPC (fr)":82.0,"PawsXPairClassification (fr)":52.78} -{"index":38,"Rank":57,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.02,"OpusparcusPC (fr)":82.15,"PawsXPairClassification (fr)":51.89} -{"index":40,"Rank":58,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":64.46,"OpusparcusPC (fr)":74.78,"PawsXPairClassification (fr)":54.14} -{"index":37,"Rank":72,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","OpusparcusPC (fr)":"","PawsXPairClassification (fr)":71.36} -{"index":51,"Rank":74,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","OpusparcusPC (fr)":"","PawsXPairClassification (fr)":66.96} -{"index":82,"Rank":83,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","OpusparcusPC (fr)":94.45,"PawsXPairClassification (fr)":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null} +{"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, 
fp32)":0.39,"Average":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory 
Usage (GB, fp32)":4.62,"Average":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null} diff --git a/all_data_tasks/19/default.jsonl b/all_data_tasks/19/default.jsonl index d1b83545bc1e4a9c15b57006770ee1e4ef4bd884..80ffbd841925016e1c5659a97c6e96210c32ad7a 100644 --- a/all_data_tasks/19/default.jsonl +++ b/all_data_tasks/19/default.jsonl @@ -1,58 +1,57 @@ -{"index":10,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.22,"AlloprofReranking":78.62,"SyntecReranking":91.83} -{"index":55,"Rank":2,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.76,"AlloprofReranking":73.49,"SyntecReranking":94.03} -{"index":8,"Rank":3,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.76,"AlloprofReranking":73.49,"SyntecReranking":94.03} -{"index":4,"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.59,"AlloprofReranking":74.78,"SyntecReranking":90.4} -{"index":3,"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"AlloprofReranking":72.92,"SyntecReranking":91.2} -{"index":0,"Rank":6,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.46,"AlloprofReranking":72.36,"SyntecReranking":88.57} -{"index":23,"Rank":7,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.35,"AlloprofReranking":73.1,"SyntecReranking":87.6} 
-{"index":22,"Rank":8,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.79,"AlloprofReranking":73.63,"SyntecReranking":85.95} -{"index":2,"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.78,"AlloprofReranking":70.79,"SyntecReranking":86.77} -{"index":56,"Rank":10,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.7,"AlloprofReranking":73.08,"SyntecReranking":84.32} -{"index":34,"Rank":11,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.7,"AlloprofReranking":73.08,"SyntecReranking":84.32} -{"index":9,"Rank":12,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":78.7,"AlloprofReranking":73.08,"SyntecReranking":84.32} -{"index":21,"Rank":13,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.14,"AlloprofReranking":70.46,"SyntecReranking":83.83} -{"index":72,"Rank":14,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":76.76,"AlloprofReranking":68.36,"SyntecReranking":85.15} -{"index":29,"Rank":15,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.58,"AlloprofReranking":64.88,"SyntecReranking":88.28} -{"index":54,"Rank":16,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.43,"AlloprofReranking":68.79,"SyntecReranking":84.07} -{"index":24,"Rank":17,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.0,"AlloprofReranking":69.5,"SyntecReranking":82.5} -{"index":80,"Rank":18,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.75,"AlloprofReranking":68.73,"SyntecReranking":82.77} -{"index":53,"Rank":19,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.48,"AlloprofReranking":68.31,"SyntecReranking":82.65} -{"index":50,"Rank":20,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.3,"AlloprofReranking":65.17,"SyntecReranking":85.43} -{"index":52,"Rank":21,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.2,"AlloprofReranking":67.24,"SyntecReranking":83.17} -{"index":71,"Rank":22,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":73.18,"AlloprofReranking":63.3,"SyntecReranking":83.07} -{"index":1,"Rank":23,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.1,"AlloprofReranking":63.54,"SyntecReranking":82.65} -{"index":36,"Rank":24,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":72.89,"AlloprofReranking":57.62,"SyntecReranking":88.15} -{"index":45,"Rank":25,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":72.14,"AlloprofReranking":57.37,"SyntecReranking":86.9} 
-{"index":44,"Rank":26,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":71.76,"AlloprofReranking":58.1,"SyntecReranking":85.43} -{"index":47,"Rank":27,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":71.44,"AlloprofReranking":56.17,"SyntecReranking":86.7} -{"index":14,"Rank":28,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.82,"AlloprofReranking":51.6,"SyntecReranking":88.03} -{"index":70,"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":68.88,"AlloprofReranking":57.99,"SyntecReranking":79.77} -{"index":68,"Rank":30,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.78,"AlloprofReranking":54.34,"SyntecReranking":83.23} -{"index":15,"Rank":31,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.36,"AlloprofReranking":51.01,"SyntecReranking":85.72} -{"index":30,"Rank":32,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":67.95,"AlloprofReranking":53.0,"SyntecReranking":82.9} -{"index":77,"Rank":33,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.26,"AlloprofReranking":55.39,"SyntecReranking":77.13} -{"index":76,"Rank":34,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.04,"AlloprofReranking":56.23,"SyntecReranking":73.85} -{"index":35,"Rank":35,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.22,"AlloprofReranking":48.68,"SyntecReranking":79.75} -{"index":69,"Rank":36,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.08,"AlloprofReranking":50.12,"SyntecReranking":78.05} -{"index":63,"Rank":37,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":63.28,"AlloprofReranking":51.77,"SyntecReranking":74.78} -{"index":43,"Rank":38,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":62.2,"AlloprofReranking":47.36,"SyntecReranking":77.05} -{"index":67,"Rank":39,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":62.02,"AlloprofReranking":49.01,"SyntecReranking":75.03} -{"index":57,"Rank":40,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":61.4,"AlloprofReranking":49.51,"SyntecReranking":73.28} -{"index":75,"Rank":41,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.88,"AlloprofReranking":51.48,"SyntecReranking":70.28} -{"index":66,"Rank":42,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":52.68,"AlloprofReranking":40.28,"SyntecReranking":65.08} -{"index":42,"Rank":43,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":52.62,"AlloprofReranking":38.85,"SyntecReranking":66.4} -{"index":48,"Rank":44,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.86,"AlloprofReranking":39.13,"SyntecReranking":62.58} -{"index":7,"Rank":45,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.98,"AlloprofReranking":37.09,"SyntecReranking":62.87} -{"index":39,"Rank":46,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":45.86,"AlloprofReranking":34.55,"SyntecReranking":57.18} -{"index":59,"Rank":47,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":45.63,"AlloprofReranking":31.69,"SyntecReranking":59.57} -{"index":5,"Rank":48,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":45.61,"AlloprofReranking":35.29,"SyntecReranking":55.93} -{"index":38,"Rank":49,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":45.34,"AlloprofReranking":34.81,"SyntecReranking":55.88} -{"index":18,"Rank":50,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":44.75,"AlloprofReranking":36.25,"SyntecReranking":53.25} -{"index":41,"Rank":51,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":44.74,"AlloprofReranking":36.23,"SyntecReranking":53.25} -{"index":17,"Rank":52,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":44.73,"AlloprofReranking":36.21,"SyntecReranking":53.25} -{"index":16,"Rank":53,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":44.73,"AlloprofReranking":36.21,"SyntecReranking":53.25} -{"index":49,"Rank":54,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.82,"AlloprofReranking":28.75,"SyntecReranking":50.88} -{"index":79,"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":39.01,"AlloprofReranking":28.62,"SyntecReranking":49.4} -{"index":78,"Rank":56,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":34.66,"AlloprofReranking":25.58,"SyntecReranking":43.75} -{"index":40,"Rank":57,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":34.54,"AlloprofReranking":26.29,"SyntecReranking":42.8} -{"index":81,"Rank":82,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloprofReranking":"","SyntecReranking":89.87} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.79,"AlloprofReranking":57.37,"AlloprofReranking (fra-Latn)":69.44,"SyntecReranking":86.9,"SyntecReranking (fra-Latn)":85.45} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":73.68,"AlloprofReranking":58.1,"AlloprofReranking (fra-Latn)":65.9,"SyntecReranking":85.43,"SyntecReranking (fra-Latn)":85.31} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million 
Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":72.12,"AlloprofReranking":56.17,"AlloprofReranking (fra-Latn)":64.41,"SyntecReranking":86.7,"SyntecReranking (fra-Latn)":81.22} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":71.44,"AlloprofReranking":54.34,"AlloprofReranking (fra-Latn)":67.2,"SyntecReranking":83.23,"SyntecReranking (fra-Latn)":80.97} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":64.74,"AlloprofReranking":49.01,"AlloprofReranking (fra-Latn)":62.42,"SyntecReranking":75.03,"SyntecReranking (fra-Latn)":72.5} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":61.44,"AlloprofReranking":49.51,"AlloprofReranking (fra-Latn)":55.37,"SyntecReranking":73.28,"SyntecReranking (fra-Latn)":67.62} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":55.3,"AlloprofReranking":31.69,"AlloprofReranking (fra-Latn)":62.62,"SyntecReranking":59.57,"SyntecReranking (fra-Latn)":67.31} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":72.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.57,"SyntecReranking (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":63.54,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":82.65,"SyntecReranking (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":70.79,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":86.77,"SyntecReranking (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":72.92,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":91.2,"SyntecReranking (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":74.78,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":90.4,"SyntecReranking (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AlloprofReranking":35.29,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":55.93,"SyntecReranking (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.6,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.03,"SyntecReranking (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.01,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":85.72,"SyntecReranking (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory 
Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.21,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.21,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.25,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":77.95,"SyntecReranking":null,"SyntecReranking (fra-Latn)":83.32} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AlloprofReranking":53.0,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":82.9,"SyntecReranking (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":48.68,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":79.75,"SyntecReranking (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AlloprofReranking":57.62,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.15,"SyntecReranking (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":34.81,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":55.88,"SyntecReranking (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":34.55,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":57.18,"SyntecReranking (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, 
fp32)":1.39,"Average":null,"AlloprofReranking":26.29,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":42.8,"SyntecReranking (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":36.23,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":38.85,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":66.4,"SyntecReranking (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AlloprofReranking":47.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":77.05,"SyntecReranking (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":39.13,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":62.58,"SyntecReranking (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":28.75,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":50.88,"SyntecReranking (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":67.01,"SyntecReranking":null,"SyntecReranking (fra-Latn)":69.17} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":69.63,"SyntecReranking":null,"SyntecReranking (fra-Latn)":66.12} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofReranking":51.77,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":74.78,"SyntecReranking (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AlloprofReranking":40.28,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":65.08,"SyntecReranking (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"AlloprofReranking":50.12,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":78.05,"SyntecReranking (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":57.99,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":79.77,"SyntecReranking (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofReranking":63.3,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":83.07,"SyntecReranking (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AlloprofReranking":68.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":85.15,"SyntecReranking (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.48,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":70.28,"SyntecReranking (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":56.23,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":73.85,"SyntecReranking (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":55.39,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":77.13,"SyntecReranking (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AlloprofReranking":25.58,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":43.75,"SyntecReranking (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AlloprofReranking":28.62,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":49.4,"SyntecReranking (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":89.87,"SyntecReranking (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} diff --git a/all_data_tasks/2/default.jsonl b/all_data_tasks/2/default.jsonl index 6a6ffb62609324c2bd9a40df1f6ecf851e007295..69246aa27835d4fc3a608c7f2c5bf7d956820db8 100644 --- a/all_data_tasks/2/default.jsonl +++ b/all_data_tasks/2/default.jsonl @@ -1,206 +1,57 @@ -{"index":6,"Rank":1,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.24,"SprintDuplicateQuestions":94.5,"TwitterSemEval2015":86.32,"TwitterURLCorpus":86.9} -{"index":96,"Rank":2,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":88.54,"SprintDuplicateQuestions":96.31,"TwitterSemEval2015":81.52,"TwitterURLCorpus":87.78} -{"index":58,"Rank":3,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":88.35,"SprintDuplicateQuestions":96.11,"TwitterSemEval2015":81.52,"TwitterURLCorpus":87.42} -{"index":156,"Rank":4,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":88.34,"SprintDuplicateQuestions":95.66,"TwitterSemEval2015":81.62,"TwitterURLCorpus":87.75} -{"index":219,"Rank":5,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.27,"SprintDuplicateQuestions":96.52,"TwitterSemEval2015":81.35,"TwitterURLCorpus":86.94} -{"index":21,"Rank":6,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.14,"SprintDuplicateQuestions":97.23,"TwitterSemEval2015":79.34,"TwitterURLCorpus":87.84} -{"index":95,"Rank":7,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.07,"SprintDuplicateQuestions":97.62,"TwitterSemEval2015":78.57,"TwitterURLCorpus":88.03} -{"index":138,"Rank":8,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.07,"SprintDuplicateQuestions":96.04,"TwitterSemEval2015":80.58,"TwitterURLCorpus":87.58} -{"index":60,"Rank":9,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":88.03,"SprintDuplicateQuestions":96.83,"TwitterSemEval2015":80.7,"TwitterURLCorpus":86.56} -{"index":64,"Rank":10,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":87.99,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":80.6,"TwitterURLCorpus":86.56} -{"index":62,"Rank":11,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":87.79,"SprintDuplicateQuestions":95.09,"TwitterSemEval2015":81.73,"TwitterURLCorpus":86.56} -{"index":139,"Rank":12,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.74,"SprintDuplicateQuestions":95.59,"TwitterSemEval2015":80.18,"TwitterURLCorpus":87.46} -{"index":51,"Rank":13,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.68,"SprintDuplicateQuestions":96.72,"TwitterSemEval2015":79.15,"TwitterURLCorpus":87.16} -{"index":1,"Rank":14,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":87.61,"SprintDuplicateQuestions":96.26,"TwitterSemEval2015":79.04,"TwitterURLCorpus":87.53} -{"index":204,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.51,"SprintDuplicateQuestions":95.32,"TwitterSemEval2015":79.64,"TwitterURLCorpus":87.58} -{"index":16,"Rank":16,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":87.51,"SprintDuplicateQuestions":95.32,"TwitterSemEval2015":79.64,"TwitterURLCorpus":87.58} -{"index":15,"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":87.38,"SprintDuplicateQuestions":95.99,"TwitterSemEval2015":79.36,"TwitterURLCorpus":86.79} -{"index":186,"Rank":18,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.37,"SprintDuplicateQuestions":96.83,"TwitterSemEval2015":79.29,"TwitterURLCorpus":85.98} -{"index":178,"Rank":19,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.34,"SprintDuplicateQuestions":94.59,"TwitterSemEval2015":79.93,"TwitterURLCorpus":87.5} -{"index":0,"Rank":20,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":87.25,"SprintDuplicateQuestions":96.49,"TwitterSemEval2015":78.23,"TwitterURLCorpus":87.04} -{"index":108,"Rank":21,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.25,"SprintDuplicateQuestions":97.24,"TwitterSemEval2015":78.17,"TwitterURLCorpus":86.33} -{"index":111,"Rank":22,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.25,"SprintDuplicateQuestions":97.24,"TwitterSemEval2015":78.17,"TwitterURLCorpus":86.33} -{"index":165,"Rank":23,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.25,"SprintDuplicateQuestions":97.24,"TwitterSemEval2015":78.17,"TwitterURLCorpus":86.33} -{"index":194,"Rank":24,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.2,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":78.55,"TwitterURLCorpus":86.23} -{"index":133,"Rank":25,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.2,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":78.55,"TwitterURLCorpus":86.23} -{"index":53,"Rank":26,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.14,"SprintDuplicateQuestions":96.97,"TwitterSemEval2015":78.29,"TwitterURLCorpus":86.16} -{"index":114,"Rank":27,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.12,"SprintDuplicateQuestions":96.73,"TwitterSemEval2015":79.04,"TwitterURLCorpus":85.6} -{"index":22,"Rank":28,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":87.12,"SprintDuplicateQuestions":96.73,"TwitterSemEval2015":79.04,"TwitterURLCorpus":85.6} -{"index":150,"Rank":29,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.12,"SprintDuplicateQuestions":96.73,"TwitterSemEval2015":79.04,"TwitterURLCorpus":85.6} -{"index":197,"Rank":30,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.07,"SprintDuplicateQuestions":96.87,"TwitterSemEval2015":78.24,"TwitterURLCorpus":86.11} -{"index":261,"Rank":31,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":87.07,"SprintDuplicateQuestions":96.87,"TwitterSemEval2015":78.24,"TwitterURLCorpus":86.11} -{"index":215,"Rank":32,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":86.91,"SprintDuplicateQuestions":95.94,"TwitterSemEval2015":78.73,"TwitterURLCorpus":86.05} -{"index":9,"Rank":33,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":86.87,"SprintDuplicateQuestions":98.07,"TwitterSemEval2015":74.44,"TwitterURLCorpus":88.11} -{"index":170,"Rank":34,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.85,"SprintDuplicateQuestions":96.79,"TwitterSemEval2015":78.23,"TwitterURLCorpus":85.53} -{"index":200,"Rank":35,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.74,"SprintDuplicateQuestions":96.49,"TwitterSemEval2015":77.8,"TwitterURLCorpus":85.94} -{"index":117,"Rank":36,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.7,"SprintDuplicateQuestions":96.54,"TwitterSemEval2015":77.6,"TwitterURLCorpus":85.96} -{"index":149,"Rank":37,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":86.62,"SprintDuplicateQuestions":94.94,"TwitterSemEval2015":77.99,"TwitterURLCorpus":86.93} -{"index":8,"Rank":38,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.57,"SprintDuplicateQuestions":96.01,"TwitterSemEval2015":76.87,"TwitterURLCorpus":86.84} -{"index":120,"Rank":39,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"index":182,"Rank":40,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"index":181,"Rank":41,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"index":179,"Rank":42,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"index":20,"Rank":43,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"index":180,"Rank":44,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"index":151,"Rank":45,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.45,"SprintDuplicateQuestions":95.69,"TwitterSemEval2015":77.67,"TwitterURLCorpus":86.0} -{"index":137,"Rank":46,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":86.45,"SprintDuplicateQuestions":95.69,"TwitterSemEval2015":77.67,"TwitterURLCorpus":86.0} -{"index":115,"Rank":47,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.32,"SprintDuplicateQuestions":95.93,"TwitterSemEval2015":76.92,"TwitterURLCorpus":86.11} -{"index":198,"Rank":48,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.24,"SprintDuplicateQuestions":96.38,"TwitterSemEval2015":76.41,"TwitterURLCorpus":85.93} -{"index":66,"Rank":49,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":86.21,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":76.14,"TwitterURLCorpus":86.23} -{"index":161,"Rank":50,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.19,"SprintDuplicateQuestions":91.18,"TwitterSemEval2015":80.27,"TwitterURLCorpus":87.12} -{"index":36,"Rank":51,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.15,"SprintDuplicateQuestions":96.79,"TwitterSemEval2015":75.16,"TwitterURLCorpus":86.49} -{"index":237,"Rank":52,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":86.13,"SprintDuplicateQuestions":95.45,"TwitterSemEval2015":77.81,"TwitterURLCorpus":85.14} -{"index":238,"Rank":53,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":86.12,"SprintDuplicateQuestions":95.68,"TwitterSemEval2015":77.54,"TwitterURLCorpus":85.13} -{"index":245,"Rank":54,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":86.06,"SprintDuplicateQuestions":91.44,"TwitterSemEval2015":80.89,"TwitterURLCorpus":85.86} -{"index":155,"Rank":55,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":86.03,"SprintDuplicateQuestions":94.92,"TwitterSemEval2015":76.92,"TwitterURLCorpus":86.25} -{"index":93,"Rank":56,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.03,"SprintDuplicateQuestions":94.92,"TwitterSemEval2015":76.92,"TwitterURLCorpus":86.25} -{"index":154,"Rank":57,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":85.94,"SprintDuplicateQuestions":95.42,"TwitterSemEval2015":76.1,"TwitterURLCorpus":86.31} -{"index":148,"Rank":58,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":85.89,"SprintDuplicateQuestions":93.07,"TwitterSemEval2015":77.42,"TwitterURLCorpus":87.18} -{"index":193,"Rank":59,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.89,"SprintDuplicateQuestions":96.05,"TwitterSemEval2015":76.08,"TwitterURLCorpus":85.54} -{"index":23,"Rank":60,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.84,"SprintDuplicateQuestions":90.94,"TwitterSemEval2015":79.64,"TwitterURLCorpus":86.95} -{"index":34,"Rank":61,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.84,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":74.8,"TwitterURLCorpus":86.46} 
-{"index":126,"Rank":62,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.79,"SprintDuplicateQuestions":92.82,"TwitterSemEval2015":77.96,"TwitterURLCorpus":86.59} -{"index":205,"Rank":63,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.79,"SprintDuplicateQuestions":92.82,"TwitterSemEval2015":77.96,"TwitterURLCorpus":86.59} -{"index":17,"Rank":64,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":85.79,"SprintDuplicateQuestions":92.82,"TwitterSemEval2015":77.96,"TwitterURLCorpus":86.59} -{"index":153,"Rank":65,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.73,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":75.97,"TwitterURLCorpus":86.63} -{"index":140,"Rank":66,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.73,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":75.97,"TwitterURLCorpus":86.63} -{"index":283,"Rank":67,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.72,"SprintDuplicateQuestions":92.25,"TwitterSemEval2015":77.13,"TwitterURLCorpus":87.78} -{"index":267,"Rank":68,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.41,"SprintDuplicateQuestions":95.02,"TwitterSemEval2015":75.24,"TwitterURLCorpus":85.96} -{"index":169,"Rank":69,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.4,"SprintDuplicateQuestions":95.62,"TwitterSemEval2015":73.81,"TwitterURLCorpus":86.78} -{"index":175,"Rank":70,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":85.38,"SprintDuplicateQuestions":95.3,"TwitterSemEval2015":74.74,"TwitterURLCorpus":86.09} -{"index":135,"Rank":71,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.38,"SprintDuplicateQuestions":95.3,"TwitterSemEval2015":74.74,"TwitterURLCorpus":86.09} -{"index":18,"Rank":72,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.33,"SprintDuplicateQuestions":94.94,"TwitterSemEval2015":75.53,"TwitterURLCorpus":85.51} -{"index":236,"Rank":73,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":85.32,"SprintDuplicateQuestions":95.05,"TwitterSemEval2015":76.03,"TwitterURLCorpus":84.89} -{"index":243,"Rank":74,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.18,"SprintDuplicateQuestions":91.23,"TwitterSemEval2015":78.25,"TwitterURLCorpus":86.05} -{"index":129,"Rank":75,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"index":206,"Rank":76,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"index":28,"Rank":77,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"index":29,"Rank":78,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"index":26,"Rank":79,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"index":27,"Rank":80,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"index":152,"Rank":81,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.09,"SprintDuplicateQuestions":94.88,"TwitterSemEval2015":74.4,"TwitterURLCorpus":85.98} -{"index":157,"Rank":82,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":85.08,"SprintDuplicateQuestions":95.29,"TwitterSemEval2015":74.16,"TwitterURLCorpus":85.79} -{"index":201,"Rank":83,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.06,"SprintDuplicateQuestions":95.0,"TwitterSemEval2015":74.5,"TwitterURLCorpus":85.69} -{"index":246,"Rank":84,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":85.06,"SprintDuplicateQuestions":88.89,"TwitterSemEval2015":80.28,"TwitterURLCorpus":86.01} -{"index":202,"Rank":85,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.05,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":73.26,"TwitterURLCorpus":85.64} -{"index":33,"Rank":86,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.05,"SprintDuplicateQuestions":96.59,"TwitterSemEval2015":72.23,"TwitterURLCorpus":86.32} -{"index":284,"Rank":87,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.04,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":73.33,"TwitterURLCorpus":87.21} -{"index":253,"Rank":88,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.0,"SprintDuplicateQuestions":95.75,"TwitterSemEval2015":73.73,"TwitterURLCorpus":85.53} -{"index":119,"Rank":89,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.99,"SprintDuplicateQuestions":96.14,"TwitterSemEval2015":73.48,"TwitterURLCorpus":85.36} -{"index":125,"Rank":90,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.99,"SprintDuplicateQuestions":96.14,"TwitterSemEval2015":73.48,"TwitterURLCorpus":85.36} -{"index":244,"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":84.97,"SprintDuplicateQuestions":89.01,"TwitterSemEval2015":79.75,"TwitterURLCorpus":86.14} -{"index":43,"Rank":92,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.97,"SprintDuplicateQuestions":91.24,"TwitterSemEval2015":77.21,"TwitterURLCorpus":86.45} 
-{"index":24,"Rank":93,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":84.92,"SprintDuplicateQuestions":96.67,"TwitterSemEval2015":73.24,"TwitterURLCorpus":84.84} -{"index":281,"Rank":94,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.89,"SprintDuplicateQuestions":92.17,"TwitterSemEval2015":75.28,"TwitterURLCorpus":87.22} -{"index":172,"Rank":95,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.8,"SprintDuplicateQuestions":96.09,"TwitterSemEval2015":72.32,"TwitterURLCorpus":85.98} -{"index":160,"Rank":96,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":84.75,"SprintDuplicateQuestions":93.13,"TwitterSemEval2015":75.28,"TwitterURLCorpus":85.83} -{"index":199,"Rank":97,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.69,"SprintDuplicateQuestions":95.5,"TwitterSemEval2015":72.92,"TwitterURLCorpus":85.66} -{"index":268,"Rank":98,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.69,"SprintDuplicateQuestions":95.63,"TwitterSemEval2015":72.87,"TwitterURLCorpus":85.58} -{"index":118,"Rank":99,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.68,"SprintDuplicateQuestions":95.79,"TwitterSemEval2015":72.95,"TwitterURLCorpus":85.3} -{"index":158,"Rank":100,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.67,"SprintDuplicateQuestions":94.88,"TwitterSemEval2015":73.34,"TwitterURLCorpus":85.79} -{"index":213,"Rank":101,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.61,"SprintDuplicateQuestions":92.9,"TwitterSemEval2015":74.27,"TwitterURLCorpus":86.65} -{"index":211,"Rank":102,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":84.59,"SprintDuplicateQuestions":92.91,"TwitterSemEval2015":74.3,"TwitterURLCorpus":86.57} -{"index":207,"Rank":103,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.59,"SprintDuplicateQuestions":94.83,"TwitterSemEval2015":72.55,"TwitterURLCorpus":86.38} -{"index":252,"Rank":104,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.57,"SprintDuplicateQuestions":95.71,"TwitterSemEval2015":72.47,"TwitterURLCorpus":85.52} -{"index":176,"Rank":105,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.56,"SprintDuplicateQuestions":95.62,"TwitterSemEval2015":71.62,"TwitterURLCorpus":86.44} -{"index":19,"Rank":106,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.53,"SprintDuplicateQuestions":94.16,"TwitterSemEval2015":75.25,"TwitterURLCorpus":84.18} -{"index":124,"Rank":107,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":84.48,"SprintDuplicateQuestions":97.32,"TwitterSemEval2015":70.29,"TwitterURLCorpus":85.83} -{"index":177,"Rank":108,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":84.44,"SprintDuplicateQuestions":95.12,"TwitterSemEval2015":72.15,"TwitterURLCorpus":86.05} -{"index":136,"Rank":109,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.44,"SprintDuplicateQuestions":95.12,"TwitterSemEval2015":72.15,"TwitterURLCorpus":86.05} -{"index":282,"Rank":110,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.22,"SprintDuplicateQuestions":89.02,"TwitterSemEval2015":76.56,"TwitterURLCorpus":87.09} -{"index":210,"Rank":111,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":84.09,"SprintDuplicateQuestions":92.31,"TwitterSemEval2015":73.61,"TwitterURLCorpus":86.34} -{"index":167,"Rank":112,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.06,"SprintDuplicateQuestions":95.36,"TwitterSemEval2015":70.64,"TwitterURLCorpus":86.18} -{"index":35,"Rank":113,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.95,"SprintDuplicateQuestions":96.6,"TwitterSemEval2015":69.41,"TwitterURLCorpus":85.85} -{"index":235,"Rank":114,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.85,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":72.23,"TwitterURLCorpus":84.77} -{"index":208,"Rank":115,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.67,"SprintDuplicateQuestions":95.93,"TwitterSemEval2015":69.68,"TwitterURLCorpus":85.41} -{"index":174,"Rank":116,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.65,"SprintDuplicateQuestions":94.87,"TwitterSemEval2015":70.25,"TwitterURLCorpus":85.83} -{"index":269,"Rank":117,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.58,"SprintDuplicateQuestions":94.67,"TwitterSemEval2015":70.34,"TwitterURLCorpus":85.72} -{"index":159,"Rank":118,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":83.57,"SprintDuplicateQuestions":93.01,"TwitterSemEval2015":72.21,"TwitterURLCorpus":85.48} -{"index":209,"Rank":119,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":83.54,"SprintDuplicateQuestions":91.45,"TwitterSemEval2015":73.23,"TwitterURLCorpus":85.93} -{"index":254,"Rank":120,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.54,"SprintDuplicateQuestions":95.57,"TwitterSemEval2015":70.12,"TwitterURLCorpus":84.92} -{"index":147,"Rank":121,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.51,"SprintDuplicateQuestions":90.35,"TwitterSemEval2015":73.99,"TwitterURLCorpus":86.2} -{"index":116,"Rank":122,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.39,"SprintDuplicateQuestions":94.14,"TwitterSemEval2015":70.46,"TwitterURLCorpus":85.58} -{"index":69,"Rank":123,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":83.32,"SprintDuplicateQuestions":95.12,"TwitterSemEval2015":69.78,"TwitterURLCorpus":85.07} -{"index":166,"Rank":124,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.11,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":69.14,"TwitterURLCorpus":85.62} -{"index":270,"Rank":125,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.09,"SprintDuplicateQuestions":95.49,"TwitterSemEval2015":68.61,"TwitterURLCorpus":85.16} -{"index":171,"Rank":126,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.04,"SprintDuplicateQuestions":95.48,"TwitterSemEval2015":68.3,"TwitterURLCorpus":85.33} -{"index":230,"Rank":127,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.04,"SprintDuplicateQuestions":90.15,"TwitterSemEval2015":73.85,"TwitterURLCorpus":85.11} -{"index":42,"Rank":128,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":82.95,"SprintDuplicateQuestions":93.06,"TwitterSemEval2015":71.24,"TwitterURLCorpus":84.54} -{"index":262,"Rank":129,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.83,"SprintDuplicateQuestions":93.02,"TwitterSemEval2015":71.07,"TwitterURLCorpus":84.4} -{"index":107,"Rank":130,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.83,"SprintDuplicateQuestions":95.48,"TwitterSemEval2015":68.17,"TwitterURLCorpus":84.84} -{"index":106,"Rank":131,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.81,"SprintDuplicateQuestions":95.78,"TwitterSemEval2015":67.76,"TwitterURLCorpus":84.89} -{"index":68,"Rank":132,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.69,"SprintDuplicateQuestions":95.35,"TwitterSemEval2015":67.97,"TwitterURLCorpus":84.75} -{"index":76,"Rank":133,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.59,"SprintDuplicateQuestions":95.54,"TwitterSemEval2015":67.55,"TwitterURLCorpus":84.68} -{"index":162,"Rank":134,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":82.59,"SprintDuplicateQuestions":92.18,"TwitterSemEval2015":70.75,"TwitterURLCorpus":84.83} -{"index":214,"Rank":135,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":82.54,"SprintDuplicateQuestions":95.55,"TwitterSemEval2015":66.85,"TwitterURLCorpus":85.21} -{"index":228,"Rank":136,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":82.41,"SprintDuplicateQuestions":92.45,"TwitterSemEval2015":70.02,"TwitterURLCorpus":84.77} -{"index":229,"Rank":137,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":82.37,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":67.86,"TwitterURLCorpus":84.7} -{"index":192,"Rank":138,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.37,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":67.86,"TwitterURLCorpus":84.7} 
-{"index":212,"Rank":139,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":82.26,"SprintDuplicateQuestions":90.06,"TwitterSemEval2015":71.68,"TwitterURLCorpus":85.03} -{"index":112,"Rank":140,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.14,"SprintDuplicateQuestions":92.92,"TwitterSemEval2015":67.8,"TwitterURLCorpus":85.71} -{"index":128,"Rank":141,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.11,"SprintDuplicateQuestions":93.23,"TwitterSemEval2015":69.6,"TwitterURLCorpus":83.49} -{"index":103,"Rank":142,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.07,"SprintDuplicateQuestions":95.28,"TwitterSemEval2015":65.78,"TwitterURLCorpus":85.15} -{"index":101,"Rank":143,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.02,"SprintDuplicateQuestions":94.67,"TwitterSemEval2015":66.17,"TwitterURLCorpus":85.21} -{"index":83,"Rank":144,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.0,"SprintDuplicateQuestions":93.84,"TwitterSemEval2015":66.87,"TwitterURLCorpus":85.29} -{"index":203,"Rank":145,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.93,"SprintDuplicateQuestions":94.59,"TwitterSemEval2015":67.01,"TwitterURLCorpus":84.2} -{"index":123,"Rank":146,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.9,"SprintDuplicateQuestions":94.93,"TwitterSemEval2015":65.31,"TwitterURLCorpus":85.46} -{"index":105,"Rank":147,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.77,"SprintDuplicateQuestions":95.3,"TwitterSemEval2015":65.79,"TwitterURLCorpus":84.22} -{"index":239,"Rank":148,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.74,"SprintDuplicateQuestions":96.09,"TwitterSemEval2015":65.95,"TwitterURLCorpus":83.17} -{"index":113,"Rank":149,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.4,"SprintDuplicateQuestions":95.15,"TwitterSemEval2015":65.79,"TwitterURLCorpus":83.27} -{"index":104,"Rank":150,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.33,"SprintDuplicateQuestions":94.96,"TwitterSemEval2015":64.32,"TwitterURLCorpus":84.7} -{"index":285,"Rank":151,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.32,"SprintDuplicateQuestions":94.53,"TwitterSemEval2015":64.41,"TwitterURLCorpus":85.01} -{"index":100,"Rank":152,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.07,"SprintDuplicateQuestions":95.21,"TwitterSemEval2015":63.28,"TwitterURLCorpus":84.72} -{"index":168,"Rank":153,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.05,"SprintDuplicateQuestions":92.09,"TwitterSemEval2015":65.96,"TwitterURLCorpus":85.11} -{"index":132,"Rank":154,"Model":"embedder-100p<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.03,"SprintDuplicateQuestions":90.68,"TwitterSemEval2015":67.77,"TwitterURLCorpus":84.65} -{"index":65,"Rank":155,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.94,"SprintDuplicateQuestions":91.3,"TwitterSemEval2015":68.76,"TwitterURLCorpus":82.76} -{"index":134,"Rank":156,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":80.86,"SprintDuplicateQuestions":93.84,"TwitterSemEval2015":64.72,"TwitterURLCorpus":84.01} -{"index":242,"Rank":157,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.81,"SprintDuplicateQuestions":90.55,"TwitterSemEval2015":66.75,"TwitterURLCorpus":85.14} -{"index":121,"Rank":158,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.8,"SprintDuplicateQuestions":95.2,"TwitterSemEval2015":62.35,"TwitterURLCorpus":84.84} -{"index":99,"Rank":159,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.8,"SprintDuplicateQuestions":95.2,"TwitterSemEval2015":62.35,"TwitterURLCorpus":84.84} -{"index":73,"Rank":160,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.73,"SprintDuplicateQuestions":94.26,"TwitterSemEval2015":63.85,"TwitterURLCorpus":84.07} -{"index":82,"Rank":161,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.65,"SprintDuplicateQuestions":93.47,"TwitterSemEval2015":63.68,"TwitterURLCorpus":84.8} -{"index":72,"Rank":162,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.62,"SprintDuplicateQuestions":89.85,"TwitterSemEval2015":67.48,"TwitterURLCorpus":84.53} -{"index":71,"Rank":163,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.57,"SprintDuplicateQuestions":89.26,"TwitterSemEval2015":69.26,"TwitterURLCorpus":83.19} -{"index":77,"Rank":164,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.4,"SprintDuplicateQuestions":94.4,"TwitterSemEval2015":63.86,"TwitterURLCorpus":82.95} -{"index":44,"Rank":165,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.4,"SprintDuplicateQuestions":90.41,"TwitterSemEval2015":67.67,"TwitterURLCorpus":83.11} -{"index":98,"Rank":166,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.84,"SprintDuplicateQuestions":92.98,"TwitterSemEval2015":62.44,"TwitterURLCorpus":84.1} -{"index":78,"Rank":167,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.7,"SprintDuplicateQuestions":92.14,"TwitterSemEval2015":63.44,"TwitterURLCorpus":83.53} -{"index":260,"Rank":168,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.62,"SprintDuplicateQuestions":90.89,"TwitterSemEval2015":63.76,"TwitterURLCorpus":84.2} -{"index":79,"Rank":169,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":79.58,"SprintDuplicateQuestions":92.58,"TwitterSemEval2015":62.37,"TwitterURLCorpus":83.79} -{"index":258,"Rank":170,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.53,"SprintDuplicateQuestions":84.37,"TwitterSemEval2015":70.13,"TwitterURLCorpus":84.09} -{"index":185,"Rank":171,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.23,"SprintDuplicateQuestions":86.13,"TwitterSemEval2015":68.03,"TwitterURLCorpus":83.52} -{"index":173,"Rank":172,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.22,"SprintDuplicateQuestions":94.93,"TwitterSemEval2015":59.3,"TwitterURLCorpus":83.44} -{"index":227,"Rank":173,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":78.87,"SprintDuplicateQuestions":89.26,"TwitterSemEval2015":62.78,"TwitterURLCorpus":84.58} -{"index":70,"Rank":174,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.57,"SprintDuplicateQuestions":85.22,"TwitterSemEval2015":67.56,"TwitterURLCorpus":82.94} -{"index":241,"Rank":175,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":78.45,"SprintDuplicateQuestions":89.46,"TwitterSemEval2015":62.06,"TwitterURLCorpus":83.83} -{"index":63,"Rank":176,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":78.01,"SprintDuplicateQuestions":88.14,"TwitterSemEval2015":66.6,"TwitterURLCorpus":79.3} -{"index":61,"Rank":177,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":77.88,"SprintDuplicateQuestions":87.57,"TwitterSemEval2015":65.14,"TwitterURLCorpus":80.94} -{"index":184,"Rank":178,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.83,"SprintDuplicateQuestions":88.21,"TwitterSemEval2015":64.8,"TwitterURLCorpus":80.49} -{"index":279,"Rank":179,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.79,"SprintDuplicateQuestions":79.85,"TwitterSemEval2015":69.45,"TwitterURLCorpus":84.06} -{"index":251,"Rank":180,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.75,"SprintDuplicateQuestions":82.81,"TwitterSemEval2015":66.16,"TwitterURLCorpus":84.28} -{"index":234,"Rank":181,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":77.61,"SprintDuplicateQuestions":87.15,"TwitterSemEval2015":61.67,"TwitterURLCorpus":84.02} -{"index":278,"Rank":182,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.46,"SprintDuplicateQuestions":76.46,"TwitterSemEval2015":70.85,"TwitterURLCorpus":85.08} -{"index":84,"Rank":183,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.03,"SprintDuplicateQuestions":80.54,"TwitterSemEval2015":66.0,"TwitterURLCorpus":84.54} -{"index":30,"Rank":184,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":76.96,"SprintDuplicateQuestions":90.71,"TwitterSemEval2015":58.07,"TwitterURLCorpus":82.09} -{"index":183,"Rank":185,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.9,"SprintDuplicateQuestions":87.07,"TwitterSemEval2015":62.51,"TwitterURLCorpus":81.11} -{"index":277,"Rank":186,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.86,"SprintDuplicateQuestions":77.85,"TwitterSemEval2015":69.04,"TwitterURLCorpus":83.69} -{"index":248,"Rank":187,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.49,"SprintDuplicateQuestions":83.53,"TwitterSemEval2015":62.49,"TwitterURLCorpus":83.46} -{"index":257,"Rank":188,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.23,"SprintDuplicateQuestions":86.37,"TwitterSemEval2015":60.64,"TwitterURLCorpus":81.68} -{"index":280,"Rank":189,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.9,"SprintDuplicateQuestions":69.52,"TwitterSemEval2015":74.42,"TwitterURLCorpus":83.75} -{"index":259,"Rank":190,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.43,"SprintDuplicateQuestions":87.05,"TwitterSemEval2015":57.03,"TwitterURLCorpus":82.21} -{"index":80,"Rank":191,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.23,"SprintDuplicateQuestions":89.89,"TwitterSemEval2015":54.75,"TwitterURLCorpus":81.06} -{"index":59,"Rank":192,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.53,"SprintDuplicateQuestions":77.36,"TwitterSemEval2015":63.58,"TwitterURLCorpus":82.64} -{"index":217,"Rank":193,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":73.68,"SprintDuplicateQuestions":69.39,"TwitterSemEval2015":67.75,"TwitterURLCorpus":83.89} -{"index":233,"Rank":194,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":72.94,"SprintDuplicateQuestions":85.55,"TwitterSemEval2015":53.85,"TwitterURLCorpus":79.41} -{"index":67,"Rank":195,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":72.21,"SprintDuplicateQuestions":77.36,"TwitterSemEval2015":61.54,"TwitterURLCorpus":77.73} -{"index":256,"Rank":196,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.88,"SprintDuplicateQuestions":75.53,"TwitterSemEval2015":58.77,"TwitterURLCorpus":81.33} -{"index":81,"Rank":197,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.78,"SprintDuplicateQuestions":77.73,"TwitterSemEval2015":57.09,"TwitterURLCorpus":80.51} -{"index":141,"Rank":198,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.4,"SprintDuplicateQuestions":77.08,"TwitterSemEval2015":53.58,"TwitterURLCorpus":83.53} -{"index":232,"Rank":199,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, 
fp32)":0.45,"Average":70.92,"SprintDuplicateQuestions":86.96,"TwitterSemEval2015":48.45,"TwitterURLCorpus":77.35} -{"index":218,"Rank":200,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":70.33,"SprintDuplicateQuestions":69.41,"TwitterSemEval2015":60.21,"TwitterURLCorpus":81.37} -{"index":11,"Rank":201,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":68.86,"SprintDuplicateQuestions":65.54,"TwitterSemEval2015":59.57,"TwitterURLCorpus":81.47} -{"index":231,"Rank":202,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.37,"SprintDuplicateQuestions":71.63,"TwitterSemEval2015":43.25,"TwitterURLCorpus":69.22} -{"index":122,"Rank":203,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.33,"SprintDuplicateQuestions":36.81,"TwitterSemEval2015":55.9,"TwitterURLCorpus":76.29} -{"index":255,"Rank":204,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.61,"SprintDuplicateQuestions":17.4,"TwitterSemEval2015":30.38,"TwitterURLCorpus":44.04} -{"index":263,"Rank":205,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":0.79,"SprintDuplicateQuestions":0.85,"TwitterSemEval2015":0.67,"TwitterURLCorpus":0.84} -{"index":97,"Rank":245,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","SprintDuplicateQuestions":57.56,"TwitterSemEval2015":"","TwitterURLCorpus":""} +{"Rank":1,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":81.65,"OpusparcusPC (fr)":93.77,"PawsXPairClassification (fr)":69.53} +{"Rank":2,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.96,"OpusparcusPC (fr)":93.94,"PawsXPairClassification (fr)":63.98} +{"Rank":3,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.81,"OpusparcusPC (fr)":92.04,"PawsXPairClassification (fr)":65.57} +{"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.66,"OpusparcusPC (fr)":93.68,"PawsXPairClassification (fr)":63.64} +{"Rank":5,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.67,"OpusparcusPC (fr)":94.08,"PawsXPairClassification (fr)":61.26} +{"Rank":6,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.5,"OpusparcusPC (fr)":92.48,"PawsXPairClassification (fr)":62.52} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.32,"OpusparcusPC (fr)":92.61,"PawsXPairClassification (fr)":62.02} +{"Rank":8,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.3,"OpusparcusPC (fr)":93.06,"PawsXPairClassification (fr)":61.54} +{"Rank":9,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":77.28,"OpusparcusPC (fr)":91.42,"PawsXPairClassification (fr)":63.13} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":77.14,"OpusparcusPC (fr)":94.12,"PawsXPairClassification (fr)":60.16} +{"Rank":11,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":77.11,"OpusparcusPC (fr)":94.63,"PawsXPairClassification (fr)":59.59} +{"Rank":12,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.85,"OpusparcusPC (fr)":92.87,"PawsXPairClassification (fr)":60.83} +{"Rank":13,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":76.19,"OpusparcusPC (fr)":93.89,"PawsXPairClassification (fr)":58.5} +{"Rank":14,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":76.08,"OpusparcusPC (fr)":88.5,"PawsXPairClassification (fr)":63.65} +{"Rank":15,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":75.8,"OpusparcusPC (fr)":93.45,"PawsXPairClassification (fr)":58.14} +{"Rank":16,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":75.39,"OpusparcusPC (fr)":91.19,"PawsXPairClassification (fr)":59.59} +{"Rank":17,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.82,"OpusparcusPC (fr)":92.72,"PawsXPairClassification (fr)":56.93} +{"Rank":18,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.74,"OpusparcusPC (fr)":92.05,"PawsXPairClassification (fr)":57.44} +{"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.47,"OpusparcusPC (fr)":92.01,"PawsXPairClassification (fr)":56.94} +{"Rank":20,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.36,"OpusparcusPC (fr)":89.76,"PawsXPairClassification (fr)":58.96} +{"Rank":21,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":74.3,"OpusparcusPC (fr)":93.96,"PawsXPairClassification (fr)":54.63} +{"Rank":22,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"OpusparcusPC (fr)":90.92,"PawsXPairClassification (fr)":57.32} +{"Rank":23,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.1,"OpusparcusPC (fr)":92.52,"PawsXPairClassification (fr)":55.68} +{"Rank":24,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.76,"OpusparcusPC (fr)":85.54,"PawsXPairClassification (fr)":61.99} +{"Rank":25,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.5,"OpusparcusPC (fr)":93.38,"PawsXPairClassification (fr)":53.62} +{"Rank":26,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":72.72,"OpusparcusPC (fr)":88.07,"PawsXPairClassification (fr)":57.36} +{"Rank":27,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.38,"OpusparcusPC (fr)":89.4,"PawsXPairClassification (fr)":55.35} +{"Rank":28,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.92,"OpusparcusPC (fr)":91.46,"PawsXPairClassification (fr)":52.39} +{"Rank":29,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":71.57,"OpusparcusPC (fr)":92.07,"PawsXPairClassification (fr)":51.08} +{"Rank":30,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":70.96,"OpusparcusPC (fr)":86.53,"PawsXPairClassification (fr)":55.4} +{"Rank":31,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.9,"OpusparcusPC (fr)":82.1,"PawsXPairClassification (fr)":59.69} +{"Rank":32,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":70.32,"OpusparcusPC (fr)":87.43,"PawsXPairClassification (fr)":53.22} +{"Rank":33,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.1,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.4} +{"Rank":34,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.09,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.39} +{"Rank":35,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.08,"OpusparcusPC (fr)":86.78,"PawsXPairClassification (fr)":53.38} +{"Rank":36,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":70.08,"OpusparcusPC (fr)":86.77,"PawsXPairClassification (fr)":53.39} +{"Rank":37,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":68.56,"OpusparcusPC (fr)":83.73,"PawsXPairClassification (fr)":53.38} +{"Rank":38,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":68.4,"OpusparcusPC (fr)":85.45,"PawsXPairClassification (fr)":51.35} +{"Rank":39,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.39,"OpusparcusPC (fr)":82.0,"PawsXPairClassification (fr)":52.78} +{"Rank":40,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.02,"OpusparcusPC (fr)":82.15,"PawsXPairClassification (fr)":51.89} +{"Rank":41,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":64.46,"OpusparcusPC (fr)":74.78,"PawsXPairClassification (fr)":54.14} +{"Rank":42,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":43,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":44,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":45,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":46,"Model":"LaBSE-en-ru<\/a>","Model Size 
(Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":47,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":48,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":49,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"OpusparcusPC (fr)":94.45,"PawsXPairClassification (fr)":null} diff --git a/all_data_tasks/20/default.jsonl b/all_data_tasks/20/default.jsonl index 822f0781ab1cc0acfb2aefbc7624d01201fa8f97..7835f3bf79deb1ea8a2ffbe946057cd3dd174231 100644 --- a/all_data_tasks/20/default.jsonl +++ b/all_data_tasks/20/default.jsonl @@ -1,60 +1,57 @@ -{"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":55.65,"AlloprofRetrieval":58.88,"BSARDRetrieval":18.8,"MintakaRetrieval (fr)":54.03,"SyntecRetrieval":78.25,"XPQARetrieval (fr)":68.3} -{"index":34,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.65,"AlloprofRetrieval":58.88,"BSARDRetrieval":18.8,"MintakaRetrieval (fr)":54.03,"SyntecRetrieval":78.25,"XPQARetrieval (fr)":68.3} -{"index":56,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.65,"AlloprofRetrieval":58.88,"BSARDRetrieval":18.8,"MintakaRetrieval (fr)":54.03,"SyntecRetrieval":78.25,"XPQARetrieval (fr)":68.3} -{"index":4,"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.56,"AlloprofRetrieval":58.27,"BSARDRetrieval":5.14,"MintakaRetrieval 
(fr)":49.19,"SyntecRetrieval":87.28,"XPQARetrieval (fr)":72.92} -{"index":3,"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.98,"AlloprofRetrieval":57.28,"BSARDRetrieval":11.83,"MintakaRetrieval (fr)":34.92,"SyntecRetrieval":87.33,"XPQARetrieval (fr)":73.56} -{"index":8,"Rank":6,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.56,"AlloprofRetrieval":57.89,"BSARDRetrieval":6.3,"MintakaRetrieval (fr)":42.56,"SyntecRetrieval":90.47,"XPQARetrieval (fr)":65.58} -{"index":55,"Rank":7,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.56,"AlloprofRetrieval":57.89,"BSARDRetrieval":6.3,"MintakaRetrieval (fr)":42.56,"SyntecRetrieval":90.47,"XPQARetrieval (fr)":65.58} -{"index":22,"Rank":8,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.09,"AlloprofRetrieval":47.6,"BSARDRetrieval":19.58,"MintakaRetrieval (fr)":32.62,"SyntecRetrieval":84.2,"XPQARetrieval (fr)":66.43} -{"index":81,"Rank":9,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.23,"AlloprofRetrieval":51.64,"BSARDRetrieval":0.61,"MintakaRetrieval (fr)":29.94,"SyntecRetrieval":85.97,"XPQARetrieval (fr)":73.0} -{"index":0,"Rank":10,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.81,"AlloprofRetrieval":56.84,"BSARDRetrieval":2.48,"MintakaRetrieval (fr)":21.73,"SyntecRetrieval":78.77,"XPQARetrieval (fr)":74.24} -{"index":29,"Rank":11,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.78,"AlloprofRetrieval":46.94,"BSARDRetrieval":2.08,"MintakaRetrieval (fr)":30.07,"SyntecRetrieval":84.6,"XPQARetrieval (fr)":70.22} -{"index":23,"Rank":12,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.68,"AlloprofRetrieval":44.42,"BSARDRetrieval":12.01,"MintakaRetrieval (fr)":25.19,"SyntecRetrieval":82.86,"XPQARetrieval (fr)":68.91} -{"index":2,"Rank":13,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.33,"AlloprofRetrieval":52.61,"BSARDRetrieval":0.29,"MintakaRetrieval (fr)":19.05,"SyntecRetrieval":82.77,"XPQARetrieval (fr)":71.95} -{"index":21,"Rank":14,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.99,"AlloprofRetrieval":41.26,"BSARDRetrieval":12.22,"MintakaRetrieval (fr)":27.6,"SyntecRetrieval":79.68,"XPQARetrieval (fr)":64.21} -{"index":53,"Rank":15,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.9,"AlloprofRetrieval":49.84,"BSARDRetrieval":0.22,"MintakaRetrieval (fr)":31.25,"SyntecRetrieval":76.63,"XPQARetrieval (fr)":66.55} -{"index":54,"Rank":16,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.41,"AlloprofRetrieval":49.05,"BSARDRetrieval":0.16,"MintakaRetrieval (fr)":31.06,"SyntecRetrieval":76.96,"XPQARetrieval (fr)":64.8} -{"index":52,"Rank":17,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":43.85,"AlloprofRetrieval":47.69,"BSARDRetrieval":1.75,"MintakaRetrieval (fr)":27.88,"SyntecRetrieval":77.67,"XPQARetrieval (fr)":64.27} -{"index":72,"Rank":18,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.84,"AlloprofRetrieval":45.75,"BSARDRetrieval":3.33,"MintakaRetrieval (fr)":34.93,"SyntecRetrieval":78.97,"XPQARetrieval (fr)":56.2} -{"index":50,"Rank":19,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.58,"AlloprofRetrieval":45.1,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":25.17,"SyntecRetrieval":79.14,"XPQARetrieval (fr)":68.49} -{"index":80,"Rank":20,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.96,"AlloprofRetrieval":38.06,"BSARDRetrieval":11.04,"MintakaRetrieval (fr)":27.48,"SyntecRetrieval":78.2,"XPQARetrieval (fr)":60.02} -{"index":24,"Rank":21,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.92,"AlloprofRetrieval":39.89,"BSARDRetrieval":8.41,"MintakaRetrieval (fr)":25.52,"SyntecRetrieval":77.57,"XPQARetrieval (fr)":63.2} -{"index":45,"Rank":22,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":42.17,"AlloprofRetrieval":38.15,"BSARDRetrieval":0.27,"MintakaRetrieval (fr)":25.2,"SyntecRetrieval":81.07,"XPQARetrieval (fr)":66.15} -{"index":44,"Rank":23,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":41.19,"AlloprofRetrieval":36.21,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":23.46,"SyntecRetrieval":80.49,"XPQARetrieval (fr)":65.81} -{"index":51,"Rank":24,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.15,"AlloprofRetrieval":45.41,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":25.7,"SyntecRetrieval":75.75,"XPQARetrieval (fr)":58.88} -{"index":1,"Rank":25,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.81,"AlloprofRetrieval":45.5,"BSARDRetrieval":0.15,"MintakaRetrieval (fr)":15.51,"SyntecRetrieval":75.83,"XPQARetrieval (fr)":67.07} -{"index":15,"Rank":26,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.42,"AlloprofRetrieval":38.36,"BSARDRetrieval":0.14,"MintakaRetrieval (fr)":25.44,"SyntecRetrieval":79.27,"XPQARetrieval (fr)":58.87} -{"index":36,"Rank":27,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":40.04,"AlloprofRetrieval":31.62,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":21.87,"SyntecRetrieval":81.11,"XPQARetrieval (fr)":65.62} -{"index":71,"Rank":28,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":39.69,"AlloprofRetrieval":40.38,"BSARDRetrieval":0.14,"MintakaRetrieval (fr)":31.54,"SyntecRetrieval":74.24,"XPQARetrieval (fr)":52.14} -{"index":76,"Rank":29,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.16,"AlloprofRetrieval":35.27,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":26.12,"SyntecRetrieval":69.82,"XPQARetrieval (fr)":59.59} -{"index":77,"Rank":30,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.78,"AlloprofRetrieval":33.78,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":26.21,"SyntecRetrieval":63.69,"XPQARetrieval (fr)":65.21} -{"index":30,"Rank":31,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":36.81,"AlloprofRetrieval":29.97,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":21.31,"SyntecRetrieval":74.2,"XPQARetrieval (fr)":58.57} -{"index":47,"Rank":32,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":36.55,"AlloprofRetrieval":27.01,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":22.53,"SyntecRetrieval":75.76,"XPQARetrieval (fr)":57.47} -{"index":14,"Rank":33,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.1,"AlloprofRetrieval":35.39,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":23.0,"SyntecRetrieval":76.88,"XPQARetrieval (fr)":45.23} -{"index":70,"Rank":34,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":35.66,"AlloprofRetrieval":34.52,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":23.92,"SyntecRetrieval":71.05,"XPQARetrieval (fr)":48.79} -{"index":68,"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":35.49,"AlloprofRetrieval":30.8,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":24.45,"SyntecRetrieval":76.0,"XPQARetrieval (fr)":46.22} -{"index":63,"Rank":36,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":33.22,"AlloprofRetrieval":26.99,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":22.55,"SyntecRetrieval":65.34,"XPQARetrieval (fr)":51.2} -{"index":35,"Rank":37,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.37,"AlloprofRetrieval":21.94,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":13.36,"SyntecRetrieval":68.62,"XPQARetrieval (fr)":57.92} -{"index":69,"Rank":38,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.18,"AlloprofRetrieval":27.52,"BSARDRetrieval":0.16,"MintakaRetrieval (fr)":21.04,"SyntecRetrieval":67.0,"XPQARetrieval (fr)":45.19} -{"index":67,"Rank":39,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.24,"AlloprofRetrieval":26.63,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":21.53,"SyntecRetrieval":65.54,"XPQARetrieval (fr)":42.51} -{"index":66,"Rank":40,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":30.69,"AlloprofRetrieval":30.23,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":16.31,"SyntecRetrieval":58.07,"XPQARetrieval (fr)":48.83} -{"index":59,"Rank":41,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.91,"AlloprofRetrieval":28.41,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":9.19,"SyntecRetrieval":60.15,"XPQARetrieval (fr)":51.79} -{"index":57,"Rank":42,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":28.47,"AlloprofRetrieval":19.77,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":15.53,"SyntecRetrieval":55.31,"XPQARetrieval (fr)":51.74} 
-{"index":75,"Rank":43,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.76,"AlloprofRetrieval":18.9,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":14.81,"SyntecRetrieval":49.69,"XPQARetrieval (fr)":40.4} -{"index":43,"Rank":44,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":23.44,"AlloprofRetrieval":16.46,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.57,"SyntecRetrieval":55.9,"XPQARetrieval (fr)":41.29} -{"index":48,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.91,"AlloprofRetrieval":12.37,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":2.78,"SyntecRetrieval":40.57,"XPQARetrieval (fr)":33.82} -{"index":5,"Rank":46,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":16.19,"AlloprofRetrieval":3.1,"BSARDRetrieval":0.36,"MintakaRetrieval (fr)":6.31,"SyntecRetrieval":28.58,"XPQARetrieval (fr)":42.59} -{"index":42,"Rank":47,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":13.89,"AlloprofRetrieval":5.51,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":2.87,"SyntecRetrieval":34.95,"XPQARetrieval (fr)":26.12} -{"index":7,"Rank":48,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":12.94,"AlloprofRetrieval":6.83,"BSARDRetrieval":2.18,"MintakaRetrieval (fr)":1.66,"SyntecRetrieval":27.64,"XPQARetrieval (fr)":26.39} -{"index":41,"Rank":49,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":8.52,"AlloprofRetrieval":1.63,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.49} -{"index":18,"Rank":50,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":8.51,"AlloprofRetrieval":1.6,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.46} -{"index":16,"Rank":51,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":8.5,"AlloprofRetrieval":1.6,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.39} -{"index":17,"Rank":52,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":8.49,"AlloprofRetrieval":1.61,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.35} -{"index":49,"Rank":53,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":7.98,"AlloprofRetrieval":1.98,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.48,"SyntecRetrieval":24.45,"XPQARetrieval (fr)":12.98} -{"index":39,"Rank":54,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":6.73,"AlloprofRetrieval":1.72,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.51,"SyntecRetrieval":22.33,"XPQARetrieval (fr)":9.09} -{"index":38,"Rank":55,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":5.87,"AlloprofRetrieval":1.63,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.58,"SyntecRetrieval":20.56,"XPQARetrieval (fr)":6.59} 
-{"index":79,"Rank":56,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":4.14,"AlloprofRetrieval":0.52,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.9,"SyntecRetrieval":6.6,"XPQARetrieval (fr)":12.7} -{"index":78,"Rank":57,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":3.2,"AlloprofRetrieval":0.16,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.88,"SyntecRetrieval":3.33,"XPQARetrieval (fr)":11.65} -{"index":40,"Rank":58,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":1.22,"AlloprofRetrieval":0.58,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.26,"SyntecRetrieval":1.58,"XPQARetrieval (fr)":3.69} -{"index":10,"Rank":60,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloprofRetrieval":58.5,"BSARDRetrieval":28.52,"MintakaRetrieval (fr)":62.53,"SyntecRetrieval":90.37,"XPQARetrieval (fr)":""} -{"index":58,"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AlloprofRetrieval":33.2,"BSARDRetrieval":"","MintakaRetrieval (fr)":"","SyntecRetrieval":"","XPQARetrieval (fr)":55.9} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":43.75,"AlloprofRetrieval":38.15,"AlloprofRetrieval (fra-Latn)":39.34,"BSARDRetrieval":0.27,"BSARDRetrieval (fra-Latn)":21.28,"SyntecRetrieval":81.07,"SyntecRetrieval (fra-Latn)":82.39} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.14,"AlloprofRetrieval":36.21,"AlloprofRetrieval (fra-Latn)":34.45,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":18.83,"SyntecRetrieval":80.49,"SyntecRetrieval (fra-Latn)":82.86} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":37.8,"AlloprofRetrieval":30.8,"AlloprofRetrieval (fra-Latn)":30.8,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":13.19,"SyntecRetrieval":76.0,"SyntecRetrieval (fra-Latn)":76.0} +{"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":36.36,"AlloprofRetrieval":27.01,"AlloprofRetrieval (fra-Latn)":27.38,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":14.54,"SyntecRetrieval":75.76,"SyntecRetrieval (fra-Latn)":73.46} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":32.32,"AlloprofRetrieval":26.63,"AlloprofRetrieval (fra-Latn)":26.63,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":9.6,"SyntecRetrieval":65.54,"SyntecRetrieval (fra-Latn)":65.54} +{"Rank":6,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":30.32,"AlloprofRetrieval":28.41,"AlloprofRetrieval (fra-Latn)":28.41,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":4.8,"SyntecRetrieval":60.15,"SyntecRetrieval (fra-Latn)":60.15} +{"Rank":7,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":25.77,"AlloprofRetrieval":19.77,"AlloprofRetrieval (fra-Latn)":19.77,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":4.44,"SyntecRetrieval":55.31,"SyntecRetrieval (fra-Latn)":55.31} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":56.84,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":2.48,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":78.77,"SyntecRetrieval (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":45.5,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.15,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":75.83,"SyntecRetrieval (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":52.61,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.29,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":82.77,"SyntecRetrieval (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":57.28,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":11.83,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":87.33,"SyntecRetrieval (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":58.27,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":5.14,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":87.28,"SyntecRetrieval (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AlloprofRetrieval":3.1,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.36,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":28.58,"SyntecRetrieval (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":35.39,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":76.88,"SyntecRetrieval (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":38.36,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.14,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":79.27,"SyntecRetrieval (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.6,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval 
(fra-Latn)":null,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.61,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.6,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":55.42,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":26.63,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":89.48} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AlloprofRetrieval":29.97,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":74.2,"SyntecRetrieval (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":21.94,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":68.62,"SyntecRetrieval (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AlloprofRetrieval":31.62,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":81.11,"SyntecRetrieval (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.63,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":20.56,"SyntecRetrieval (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.72,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":22.33,"SyntecRetrieval (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, 
fp32)":1.39,"Average":null,"AlloprofRetrieval":0.58,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":1.58,"SyntecRetrieval (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":1.63,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":5.51,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":34.95,"SyntecRetrieval (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AlloprofRetrieval":16.46,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":55.9,"SyntecRetrieval (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":12.37,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":40.57,"SyntecRetrieval (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":1.98,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":24.45,"SyntecRetrieval (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AlloprofRetrieval":33.2,"AlloprofRetrieval (fra-Latn)":33.2,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":6.24,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":60.8} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":34.27,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":6.98,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":57.39} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofRetrieval":26.99,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":65.34,"SyntecRetrieval (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval 
(fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AlloprofRetrieval":30.23,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":58.07,"SyntecRetrieval (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":27.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.16,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":67.0,"SyntecRetrieval (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":34.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":71.05,"SyntecRetrieval (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofRetrieval":40.38,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.14,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":74.24,"SyntecRetrieval (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AlloprofRetrieval":45.75,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":3.33,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":78.97,"SyntecRetrieval (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":18.9,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":49.69,"SyntecRetrieval (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":35.27,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":69.82,"SyntecRetrieval (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":33.78,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":63.69,"SyntecRetrieval (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, 
fp32)":1.04,"Average":null,"AlloprofRetrieval":0.16,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":3.33,"SyntecRetrieval (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AlloprofRetrieval":0.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":6.6,"SyntecRetrieval (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":51.64,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.61,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":85.97,"SyntecRetrieval (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null} diff --git a/all_data_tasks/21/default.jsonl b/all_data_tasks/21/default.jsonl index 5563f96fe545d789f616f053ea9f38c92776a1f9..4faecada55d8aaa509feb3b09ead1c5da8193295 100644 --- a/all_data_tasks/21/default.jsonl +++ b/all_data_tasks/21/default.jsonl @@ -1,74 +1,57 @@ -{"index":22,"Rank":1,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.62,"STS22 (fr)":84.64,"STSBenchmarkMultilingualSTS (fr)":87.02,"SICKFr":79.2} -{"index":23,"Rank":2,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.08,"STS22 (fr)":82.84,"STSBenchmarkMultilingualSTS (fr)":86.59,"SICKFr":79.81} -{"index":10,"Rank":3,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.59,"STS22 (fr)":83.28,"STSBenchmarkMultilingualSTS (fr)":85.09,"SICKFr":79.39} -{"index":34,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.31,"STS22 (fr)":82.58,"STSBenchmarkMultilingualSTS (fr)":85.46,"SICKFr":78.9} -{"index":56,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.31,"STS22 (fr)":82.58,"STSBenchmarkMultilingualSTS (fr)":85.46,"SICKFr":78.9} -{"index":9,"Rank":6,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":82.31,"STS22 (fr)":82.58,"STSBenchmarkMultilingualSTS (fr)":85.46,"SICKFr":78.9} -{"index":36,"Rank":7,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":81.74,"STS22 (fr)":81.73,"STSBenchmarkMultilingualSTS (fr)":85.79,"SICKFr":77.7} -{"index":15,"Rank":8,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.28,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":81.84,"SICKFr":79.23} -{"index":55,"Rank":9,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.26,"STS22 (fr)":81.84,"STSBenchmarkMultilingualSTS (fr)":82.25,"SICKFr":79.68} -{"index":8,"Rank":10,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.26,"STS22 
(fr)":81.84,"STSBenchmarkMultilingualSTS (fr)":82.25,"SICKFr":79.68} -{"index":80,"Rank":11,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.59,"STS22 (fr)":80.31,"STSBenchmarkMultilingualSTS (fr)":84.36,"SICKFr":77.1} -{"index":3,"Rank":12,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.29,"STS22 (fr)":83.75,"STSBenchmarkMultilingualSTS (fr)":83.02,"SICKFr":74.09} -{"index":4,"Rank":13,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.13,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":82.72,"SICKFr":74.9} -{"index":21,"Rank":14,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.12,"STS22 (fr)":81.14,"STSBenchmarkMultilingualSTS (fr)":83.75,"SICKFr":75.48} -{"index":29,"Rank":15,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.01,"STS22 (fr)":83.31,"STSBenchmarkMultilingualSTS (fr)":79.99,"SICKFr":76.74} -{"index":0,"Rank":16,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.56,"STS22 (fr)":82.74,"STSBenchmarkMultilingualSTS (fr)":79.72,"SICKFr":76.21} -{"index":50,"Rank":17,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.5,"STS22 (fr)":81.7,"STSBenchmarkMultilingualSTS (fr)":81.3,"SICKFr":75.51} -{"index":45,"Rank":18,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":79.37,"STS22 (fr)":76.79,"STSBenchmarkMultilingualSTS (fr)":82.53,"SICKFr":78.78} -{"index":24,"Rank":19,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.32,"STS22 (fr)":82.57,"STSBenchmarkMultilingualSTS (fr)":82.51,"SICKFr":72.88} -{"index":72,"Rank":20,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.37,"STS22 (fr)":76.8,"STSBenchmarkMultilingualSTS (fr)":81.24,"SICKFr":77.07} -{"index":81,"Rank":21,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.31,"STS22 (fr)":81.09,"STSBenchmarkMultilingualSTS (fr)":77.55,"SICKFr":76.28} -{"index":75,"Rank":22,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.28,"STS22 (fr)":74.1,"STSBenchmarkMultilingualSTS (fr)":83.48,"SICKFr":77.25} -{"index":14,"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.26,"STS22 (fr)":82.8,"STSBenchmarkMultilingualSTS (fr)":76.48,"SICKFr":75.5} -{"index":68,"Rank":24,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":78.18,"STS22 (fr)":74.3,"STSBenchmarkMultilingualSTS (fr)":84.69,"SICKFr":75.56} -{"index":35,"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":77.79,"STS22 (fr)":77.54,"STSBenchmarkMultilingualSTS (fr)":81.64,"SICKFr":74.18} -{"index":54,"Rank":26,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.72,"STS22 
(fr)":82.35,"STSBenchmarkMultilingualSTS (fr)":79.22,"SICKFr":71.6} -{"index":53,"Rank":27,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.64,"STS22 (fr)":80.8,"STSBenchmarkMultilingualSTS (fr)":80.23,"SICKFr":71.89} -{"index":2,"Rank":28,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.52,"STS22 (fr)":79.99,"STSBenchmarkMultilingualSTS (fr)":79.02,"SICKFr":73.56} -{"index":71,"Rank":29,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.33,"STS22 (fr)":77.49,"STSBenchmarkMultilingualSTS (fr)":79.42,"SICKFr":75.08} -{"index":44,"Rank":30,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":77.22,"STS22 (fr)":74.8,"STSBenchmarkMultilingualSTS (fr)":80.62,"SICKFr":76.23} -{"index":47,"Rank":31,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":77.17,"STS22 (fr)":76.58,"STSBenchmarkMultilingualSTS (fr)":79.32,"SICKFr":75.62} -{"index":30,"Rank":32,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":75.87,"STS22 (fr)":78.77,"STSBenchmarkMultilingualSTS (fr)":79.23,"SICKFr":69.6} -{"index":63,"Rank":33,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":75.46,"STS22 (fr)":76.41,"STSBenchmarkMultilingualSTS (fr)":77.49,"SICKFr":72.49} -{"index":67,"Rank":34,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":75.18,"STS22 (fr)":70.55,"STSBenchmarkMultilingualSTS (fr)":79.9,"SICKFr":75.1} -{"index":70,"Rank":35,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":75.14,"STS22 (fr)":75.01,"STSBenchmarkMultilingualSTS (fr)":77.59,"SICKFr":72.83} -{"index":76,"Rank":36,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.92,"STS22 (fr)":77.91,"STSBenchmarkMultilingualSTS (fr)":75.48,"SICKFr":71.37} -{"index":52,"Rank":37,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.91,"STS22 (fr)":78.68,"STSBenchmarkMultilingualSTS (fr)":76.38,"SICKFr":69.67} -{"index":77,"Rank":38,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.55,"STS22 (fr)":71.11,"STSBenchmarkMultilingualSTS (fr)":78.16,"SICKFr":74.39} -{"index":69,"Rank":39,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.49,"STS22 (fr)":77.69,"STSBenchmarkMultilingualSTS (fr)":74.04,"SICKFr":71.74} -{"index":57,"Rank":40,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":74.33,"STS22 (fr)":77.95,"STSBenchmarkMultilingualSTS (fr)":75.1,"SICKFr":69.94} -{"index":51,"Rank":41,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.28,"STS22 (fr)":75.66,"STSBenchmarkMultilingualSTS (fr)":71.13,"SICKFr":70.04} -{"index":1,"Rank":42,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":71.82,"STS22 (fr)":70.51,"STSBenchmarkMultilingualSTS (fr)":76.43,"SICKFr":68.51} -{"index":59,"Rank":43,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":68.14,"STS22 (fr)":77.0,"STSBenchmarkMultilingualSTS (fr)":64.93,"SICKFr":62.48} -{"index":66,"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":66.86,"STS22 (fr)":74.62,"STSBenchmarkMultilingualSTS (fr)":63.85,"SICKFr":62.11} -{"index":43,"Rank":45,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":65.36,"STS22 (fr)":69.82,"STSBenchmarkMultilingualSTS (fr)":61.87,"SICKFr":64.39} -{"index":5,"Rank":46,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":64.46,"STS22 (fr)":58.61,"STSBenchmarkMultilingualSTS (fr)":69.82,"SICKFr":64.95} -{"index":48,"Rank":47,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.34,"STS22 (fr)":77.1,"STSBenchmarkMultilingualSTS (fr)":49.97,"SICKFr":59.94} -{"index":7,"Rank":48,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.33,"STS22 (fr)":67.83,"STSBenchmarkMultilingualSTS (fr)":51.98,"SICKFr":58.18} -{"index":42,"Rank":49,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":56.57,"STS22 (fr)":56.47,"STSBenchmarkMultilingualSTS (fr)":54.97,"SICKFr":58.26} -{"index":38,"Rank":50,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":52.12,"STS22 (fr)":65.37,"STSBenchmarkMultilingualSTS (fr)":37.14,"SICKFr":53.86} -{"index":49,"Rank":51,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.89,"STS22 (fr)":61.35,"STSBenchmarkMultilingualSTS (fr)":36.78,"SICKFr":54.54} -{"index":78,"Rank":52,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":50.52,"STS22 (fr)":56.72,"STSBenchmarkMultilingualSTS (fr)":46.23,"SICKFr":48.62} -{"index":17,"Rank":53,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":50.47,"STS22 (fr)":40.4,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.77} -{"index":16,"Rank":54,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":50.44,"STS22 (fr)":40.31,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76} -{"index":41,"Rank":55,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":50.02,"STS22 (fr)":39.05,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.75} -{"index":18,"Rank":56,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":49.93,"STS22 (fr)":38.77,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76} -{"index":79,"Rank":57,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":49.27,"STS22 (fr)":55.49,"STSBenchmarkMultilingualSTS (fr)":42.32,"SICKFr":50.01} -{"index":39,"Rank":58,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":43.49,"STS22 
(fr)":55.15,"STSBenchmarkMultilingualSTS (fr)":33.41,"SICKFr":41.9} -{"index":40,"Rank":59,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":32.93,"STS22 (fr)":48.52,"STSBenchmarkMultilingualSTS (fr)":15.66,"SICKFr":34.6} -{"index":20,"Rank":65,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":79.88,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":25,"Rank":66,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":72.79,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":26,"Rank":67,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":67.66,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":27,"Rank":68,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":54.56,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":31,"Rank":70,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":81.47,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":32,"Rank":71,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":73.13,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":33,"Rank":72,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":80.38,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":37,"Rank":73,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":61.72,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":58,"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS22 (fr)":69.51,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":61,"Rank":77,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS22 (fr)":53.92,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":62,"Rank":78,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","STS22 (fr)":49.43,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":64,"Rank":79,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS22 (fr)":78.7,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":65,"Rank":80,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","STS22 (fr)":79.43,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":73,"Rank":81,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":42.0,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"index":74,"Rank":82,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":74.1,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, 
fp32)":2.09,"Average":78.8,"SICKFr":78.78,"SICKFr (fra-Latn)":78.81} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":76.0,"SICKFr":76.23,"SICKFr (fra-Latn)":75.76} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":75.56,"SICKFr":75.56,"SICKFr (fra-Latn)":75.56} +{"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":75.15,"SICKFr":75.62,"SICKFr (fra-Latn)":74.67} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":75.1,"SICKFr":75.1,"SICKFr (fra-Latn)":75.1} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.94,"SICKFr":69.94,"SICKFr (fra-Latn)":69.94} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":62.48,"SICKFr":62.48,"SICKFr (fra-Latn)":62.48} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":76.21,"SICKFr (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":68.51,"SICKFr (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":73.56,"SICKFr (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":74.09,"SICKFr (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":74.9,"SICKFr (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"SICKFr":64.95,"SICKFr (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":75.5,"SICKFr (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":79.23,"SICKFr (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SICKFr":58.76,"SICKFr (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SICKFr":58.77,"SICKFr (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SICKFr":58.76,"SICKFr (fra-Latn)":null} 
+{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":76.91} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"SICKFr":69.6,"SICKFr (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SICKFr":74.18,"SICKFr (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"SICKFr":77.7,"SICKFr (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SICKFr":53.86,"SICKFr (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SICKFr":41.9,"SICKFr (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"SICKFr":34.6,"SICKFr (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"SICKFr":58.75,"SICKFr (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SICKFr":58.26,"SICKFr (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"SICKFr":64.39,"SICKFr (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":59.94,"SICKFr (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":54.54,"SICKFr (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":63.16} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":67.05} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"SICKFr":72.49,"SICKFr (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"SICKFr":62.11,"SICKFr (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SICKFr":71.74,"SICKFr (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SICKFr":72.83,"SICKFr (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SICKFr":75.08,"SICKFr (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"SICKFr":77.07,"SICKFr (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":77.25,"SICKFr (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":71.37,"SICKFr (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":74.39,"SICKFr (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"SICKFr":48.62,"SICKFr (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"SICKFr":50.01,"SICKFr (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":76.28,"SICKFr (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SICKFr":null,"SICKFr (fra-Latn)":null} diff --git a/all_data_tasks/22/default.jsonl b/all_data_tasks/22/default.jsonl index 35ec78a57c2660c847e24feaa17ace9ee04183cc..bf31ada5bfb2c7c32e3fa192789053a7d00962fd 100644 --- a/all_data_tasks/22/default.jsonl +++ b/all_data_tasks/22/default.jsonl @@ -1,59 +1,57 @@ -{"index":53,"Rank":1,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":32.46} -{"index":43,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEvalFr":32.22} -{"index":47,"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEvalFr":31.85} -{"index":23,"Rank":4,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.62} 
-{"index":71,"Rank":5,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEvalFr":31.59} -{"index":5,"Rank":6,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"SummEvalFr":31.56} -{"index":24,"Rank":7,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.55} -{"index":0,"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.47} -{"index":34,"Rank":9,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.45} -{"index":56,"Rank":10,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.45} -{"index":9,"Rank":11,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"SummEvalFr":31.45} -{"index":14,"Rank":12,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.4} -{"index":15,"Rank":13,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.26} -{"index":38,"Rank":14,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":31.26} -{"index":10,"Rank":15,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.26} -{"index":45,"Rank":16,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEvalFr":30.92} -{"index":1,"Rank":17,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.88} -{"index":36,"Rank":18,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"SummEvalFr":30.88} -{"index":44,"Rank":19,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEvalFr":30.76} -{"index":42,"Rank":20,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEvalFr":30.72} -{"index":8,"Rank":21,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.5} -{"index":55,"Rank":22,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.5} -{"index":81,"Rank":23,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.5} -{"index":72,"Rank":24,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEvalFr":30.39} -{"index":3,"Rank":25,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.34} -{"index":22,"Rank":26,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.3} -{"index":51,"Rank":27,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.28} -{"index":52,"Rank":28,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","SummEvalFr":30.27} -{"index":70,"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEvalFr":30.23} -{"index":54,"Rank":30,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.22} -{"index":21,"Rank":31,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.16} -{"index":57,"Rank":32,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"SummEvalFr":30.16} -{"index":50,"Rank":33,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.13} -{"index":69,"Rank":34,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEvalFr":30.01} -{"index":4,"Rank":35,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.96} -{"index":29,"Rank":36,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.69} -{"index":80,"Rank":37,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.65} -{"index":48,"Rank":38,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.48} -{"index":68,"Rank":39,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEvalFr":29.47} -{"index":39,"Rank":40,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":29.43} -{"index":75,"Rank":41,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.33} -{"index":40,"Rank":42,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"SummEvalFr":29.25} -{"index":67,"Rank":43,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEvalFr":29.2} -{"index":78,"Rank":44,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"SummEvalFr":29.14} -{"index":17,"Rank":45,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":29.13} -{"index":16,"Rank":46,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":29.06} -{"index":30,"Rank":47,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEvalFr":29.04} -{"index":79,"Rank":48,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEvalFr":28.89} -{"index":18,"Rank":49,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":28.84} -{"index":41,"Rank":50,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"SummEvalFr":28.81} -{"index":35,"Rank":51,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEvalFr":28.77} -{"index":7,"Rank":52,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.72} -{"index":77,"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.56} -{"index":2,"Rank":54,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.34} -{"index":59,"Rank":55,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEvalFr":28.28} -{"index":76,"Rank":56,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.21} -{"index":63,"Rank":57,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"SummEvalFr":28.12} -{"index":66,"Rank":58,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEvalFr":27.59} -{"index":49,"Rank":59,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":23.63} +{"Rank":1,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":31.86,"SummEvalFr":30.76,"SummEvalFr (fra-Latn)":32.96} +{"Rank":2,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.5,"SummEvalFr":31.85,"SummEvalFr (fra-Latn)":31.14} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.92,"SummEvalFr":30.92,"SummEvalFr (fra-Latn)":30.92} +{"Rank":4,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":30.16,"SummEvalFr":30.16,"SummEvalFr (fra-Latn)":30.16} +{"Rank":5,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":29.47,"SummEvalFr":29.47,"SummEvalFr (fra-Latn)":29.47} +{"Rank":6,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":29.2,"SummEvalFr":29.2,"SummEvalFr (fra-Latn)":29.2} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.28,"SummEvalFr":28.28,"SummEvalFr (fra-Latn)":28.29} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.47,"SummEvalFr (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.88,"SummEvalFr (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.34,"SummEvalFr (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.34,"SummEvalFr (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.96,"SummEvalFr (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"SummEvalFr":31.56,"SummEvalFr (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.4,"SummEvalFr (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.26,"SummEvalFr (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.06,"SummEvalFr (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.13,"SummEvalFr (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":28.84,"SummEvalFr (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":29.97} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"SummEvalFr":29.04,"SummEvalFr (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":28.77,"SummEvalFr (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"SummEvalFr":30.88,"SummEvalFr (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":31.26,"SummEvalFr (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.43,"SummEvalFr (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"SummEvalFr":29.25,"SummEvalFr (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":28.81,"SummEvalFr (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":null,"SummEvalFr":30.72,"SummEvalFr (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"SummEvalFr":32.22,"SummEvalFr (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.48,"SummEvalFr (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":23.63,"SummEvalFr (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":26.63} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":28.11} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"SummEvalFr":28.12,"SummEvalFr (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"SummEvalFr":27.59,"SummEvalFr (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":30.01,"SummEvalFr (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":30.23,"SummEvalFr (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SummEvalFr":31.59,"SummEvalFr (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"SummEvalFr":30.39,"SummEvalFr (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.33,"SummEvalFr (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.21,"SummEvalFr (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.56,"SummEvalFr (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"SummEvalFr":29.14,"SummEvalFr (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"SummEvalFr":28.89,"SummEvalFr (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.5,"SummEvalFr (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} diff --git a/all_data_tasks/23/default.jsonl b/all_data_tasks/23/default.jsonl index a9d1ab184341508fa2d07f84f5c7a91f2a7dcae7..d2efe60de93c56eeca8f8b7fc5b011495104b438 100644 --- a/all_data_tasks/23/default.jsonl +++ b/all_data_tasks/23/default.jsonl @@ -1,47 +1,32 @@ -{"index":13,"Rank":1,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97} -{"index":12,"Rank":2,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25} -{"index":24,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} -{"index":30,"Rank":4,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19} -{"index":23,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} -{"index":17,"Rank":6,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":61.34,"MassiveIntentClassification (nb)":57.57,"MassiveScenarioClassification (nb)":63.66,"NoRecClassification":50.46,"NordicLangClassification":75.98,"NorwegianParliament":57.66,"ScalaNbClassification":62.69} -{"index":31,"Rank":7,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79} 
-{"index":26,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} -{"index":16,"Rank":9,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95} -{"index":45,"Rank":10,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99} -{"index":18,"Rank":11,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18} -{"index":46,"Rank":12,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33} -{"index":19,"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18} -{"index":20,"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13} -{"index":8,"Rank":15,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34} -{"index":22,"Rank":16,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03} -{"index":6,"Rank":17,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63} -{"index":34,"Rank":18,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} -{"index":29,"Rank":19,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28} -{"index":7,"Rank":20,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41} -{"index":0,"Rank":21,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":1,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""} -{"index":2,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":3,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification (nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":4,"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":5,"Rank":26,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""} -{"index":9,"Rank":27,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":10,"Rank":28,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} 
-{"index":11,"Rank":29,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":14,"Rank":30,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification (nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":15,"Rank":31,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":21,"Rank":32,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":25,"Rank":33,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":27,"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":28,"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":32,"Rank":36,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":33,"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":35,"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":36,"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification 
(nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":37,"Rank":40,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":38,"Rank":41,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":39,"Rank":42,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":40,"Rank":43,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":41,"Rank":44,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":42,"Rank":45,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":43,"Rank":46,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":44,"Rank":47,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":65.06,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NoRecClassification (nob-Latn)":58.43,"NordicLangClassification":82.29,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":80.15,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":62.42,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NoRecClassification 
(nob-Latn)":53.74,"NordicLangClassification":75.94,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":75.85,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.43,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NoRecClassification (nob-Latn)":50.08,"NordicLangClassification":75.15,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":72.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.04,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NoRecClassification (nob-Latn)":37.93,"NordicLangClassification":54.71,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.7,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} +{"Rank":5,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":6,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":52.05,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":63.6,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":7,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NoRecClassification (nob-Latn)":"","NordicLangClassification":62.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.56,"ScalaNbClassification":53.63} +{"Rank":8,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NoRecClassification (nob-Latn)":"","NordicLangClassification":44.53,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":52.44,"ScalaNbClassification":52.41} +{"Rank":9,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NoRecClassification (nob-Latn)":"","NordicLangClassification":51.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":55.74,"ScalaNbClassification":50.34} +{"Rank":10,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","MassiveIntentClassification 
(nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.69,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.41,"ScalaNbClassification":62.25} +{"Rank":11,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":85.27,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":62.58,"ScalaNbClassification":66.97} +{"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NoRecClassification (nob-Latn)":"","NordicLangClassification":77.68,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.78,"ScalaNbClassification":58.95} +{"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NoRecClassification (nob-Latn)":"","NordicLangClassification":59.34,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.42,"ScalaNbClassification":50.18} +{"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NoRecClassification (nob-Latn)":"","NordicLangClassification":58.3,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.26,"ScalaNbClassification":50.13} +{"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NoRecClassification (nob-Latn)":"","NordicLangClassification":53.47,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.57,"ScalaNbClassification":50.03} +{"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NoRecClassification (nob-Latn)":"","NordicLangClassification":57.82,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":53.25,"ScalaNbClassification":75.28} +{"Rank":17,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NoRecClassification (nob-Latn)":"","NordicLangClassification":82.67,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":59.33,"ScalaNbClassification":60.19} +{"Rank":18,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, 
fp32)":1.37,"Average":"","MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.85,"ScalaNbClassification":66.79} +{"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NoRecClassification (nob-Latn)":45.45,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":35.39,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NoRecClassification (nob-Latn)":37.73,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.17,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":38.34,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":50.15,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":22,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":25,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":26,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory 
Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":46.7,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":42.52,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":50.32,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":41.57,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":28,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":30,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":31,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NoRecClassification (nob-Latn)":"","NordicLangClassification":74.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.79,"ScalaNbClassification":59.99} +{"Rank":32,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NoRecClassification (nob-Latn)":"","NordicLangClassification":79.39,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.75,"ScalaNbClassification":58.33} diff --git a/all_data_tasks/24/default.jsonl b/all_data_tasks/24/default.jsonl index 65866e767e2f5be3985e2e532f00f237fe5f85ff..fb7dfe5e3172f53849b302706f2add0f6315f1a1 100644 --- a/all_data_tasks/24/default.jsonl +++ b/all_data_tasks/24/default.jsonl @@ -1,21 +1,21 @@ -{"index":15,"Rank":1,"Model":"FollowIR-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":12.15,"Core17InstructionRetrieval":16.48,"News21InstructionRetrieval":6.26,"Robust04InstructionRetrieval":13.72} 
-{"index":17,"Rank":2,"Model":"mistral-7b-instruct-v0.2<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":10.15,"Core17InstructionRetrieval":13.03,"News21InstructionRetrieval":4.81,"Robust04InstructionRetrieval":12.61} -{"index":9,"Rank":3,"Model":"flan-t5-large<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":4.72,"Core17InstructionRetrieval":1.32,"News21InstructionRetrieval":8.95,"Robust04InstructionRetrieval":3.9} -{"index":5,"Rank":4,"Model":"monot5-3b-msmarco-10k<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":2.53,"Core17InstructionRetrieval":1.84,"News21InstructionRetrieval":1.78,"Robust04InstructionRetrieval":3.96} -{"index":0,"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":2.33,"Core17InstructionRetrieval":5.44,"News21InstructionRetrieval":3.94,"Robust04InstructionRetrieval":-2.4} -{"index":16,"Rank":6,"Model":"llama-2-7b-chat<\/a>","Model Size (Million Parameters)":7000,"Memory Usage (GB, fp32)":26.08,"Average":1.69,"Core17InstructionRetrieval":2.84,"News21InstructionRetrieval":0.23,"Robust04InstructionRetrieval":2.0} -{"index":7,"Rank":7,"Model":"tart-full-flan-t5-xl<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":1.36,"Core17InstructionRetrieval":2.82,"News21InstructionRetrieval":1.99,"Robust04InstructionRetrieval":-0.72} -{"index":8,"Rank":8,"Model":"flan-t5-base<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":0.64,"Core17InstructionRetrieval":-3.31,"News21InstructionRetrieval":-0.12,"Robust04InstructionRetrieval":5.35} -{"index":3,"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":-0.02,"Core17InstructionRetrieval":2.62,"News21InstructionRetrieval":-1.01,"Robust04InstructionRetrieval":-1.68} -{"index":2,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-0.21,"Core17InstructionRetrieval":2.8,"News21InstructionRetrieval":0.2,"Robust04InstructionRetrieval":-3.63} -{"index":13,"Rank":11,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":-1.06,"Core17InstructionRetrieval":0.12,"News21InstructionRetrieval":0.87,"Robust04InstructionRetrieval":-4.16} -{"index":6,"Rank":12,"Model":"monot5-base-msmarco-10k<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":-1.75,"Core17InstructionRetrieval":-4.06,"News21InstructionRetrieval":5.02,"Robust04InstructionRetrieval":-6.2} -{"index":1,"Rank":13,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.09,"Core17InstructionRetrieval":-1.06,"News21InstructionRetrieval":-2.15,"Robust04InstructionRetrieval":-3.06} -{"index":20,"Rank":14,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.68,"Core17InstructionRetrieval":-0.2,"News21InstructionRetrieval":-2.03,"Robust04InstructionRetrieval":-5.81} -{"index":11,"Rank":15,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":-2.76,"Core17InstructionRetrieval":0.69,"News21InstructionRetrieval":-0.9,"Robust04InstructionRetrieval":-8.08} -{"index":14,"Rank":16,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million 
Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":-3.45,"Core17InstructionRetrieval":0.09,"News21InstructionRetrieval":-0.86,"Robust04InstructionRetrieval":-9.59} -{"index":4,"Rank":17,"Model":"monobert-large-msmarco<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":-3.47,"Core17InstructionRetrieval":-0.24,"News21InstructionRetrieval":-0.8,"Robust04InstructionRetrieval":-9.36} -{"index":18,"Rank":18,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.81,"Core17InstructionRetrieval":-2.48,"News21InstructionRetrieval":-2.83,"Robust04InstructionRetrieval":-6.12} -{"index":12,"Rank":19,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.88,"Core17InstructionRetrieval":-2.9,"News21InstructionRetrieval":-2.0,"Robust04InstructionRetrieval":-6.73} -{"index":10,"Rank":20,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-4.43,"Core17InstructionRetrieval":-1.09,"News21InstructionRetrieval":-1.78,"Robust04InstructionRetrieval":-10.42} -{"index":19,"Rank":21,"Model":"tart-dual-contriever-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-5.0,"Core17InstructionRetrieval":-3.04,"News21InstructionRetrieval":-2.98,"Robust04InstructionRetrieval":-8.98} +{"Rank":1,"Model":"FollowIR-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":12.15,"Core17InstructionRetrieval":16.48,"News21InstructionRetrieval":6.26,"Robust04InstructionRetrieval":13.72} +{"Rank":2,"Model":"mistral-7b-instruct-v0.2<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":10.15,"Core17InstructionRetrieval":13.03,"News21InstructionRetrieval":4.81,"Robust04InstructionRetrieval":12.61} +{"Rank":3,"Model":"flan-t5-large<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":4.72,"Core17InstructionRetrieval":1.32,"News21InstructionRetrieval":8.95,"Robust04InstructionRetrieval":3.9} +{"Rank":4,"Model":"monot5-3b-msmarco-10k<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":2.53,"Core17InstructionRetrieval":1.84,"News21InstructionRetrieval":1.78,"Robust04InstructionRetrieval":3.96} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":2.33,"Core17InstructionRetrieval":5.44,"News21InstructionRetrieval":3.94,"Robust04InstructionRetrieval":-2.4} +{"Rank":6,"Model":"llama-2-7b-chat<\/a>","Model Size (Million Parameters)":7000,"Memory Usage (GB, fp32)":26.08,"Average":1.69,"Core17InstructionRetrieval":2.84,"News21InstructionRetrieval":0.23,"Robust04InstructionRetrieval":2.0} +{"Rank":7,"Model":"tart-full-flan-t5-xl<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":1.36,"Core17InstructionRetrieval":2.82,"News21InstructionRetrieval":1.99,"Robust04InstructionRetrieval":-0.72} +{"Rank":8,"Model":"flan-t5-base<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":0.64,"Core17InstructionRetrieval":-3.31,"News21InstructionRetrieval":-0.12,"Robust04InstructionRetrieval":5.35} +{"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, 
fp32)":26.97,"Average":-0.02,"Core17InstructionRetrieval":2.62,"News21InstructionRetrieval":-1.01,"Robust04InstructionRetrieval":-1.68} +{"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-0.21,"Core17InstructionRetrieval":2.8,"News21InstructionRetrieval":0.2,"Robust04InstructionRetrieval":-3.63} +{"Rank":11,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":-1.06,"Core17InstructionRetrieval":0.12,"News21InstructionRetrieval":0.87,"Robust04InstructionRetrieval":-4.16} +{"Rank":12,"Model":"monot5-base-msmarco-10k<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":-1.75,"Core17InstructionRetrieval":-4.06,"News21InstructionRetrieval":5.02,"Robust04InstructionRetrieval":-6.2} +{"Rank":13,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.09,"Core17InstructionRetrieval":-1.06,"News21InstructionRetrieval":-2.15,"Robust04InstructionRetrieval":-3.06} +{"Rank":14,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.68,"Core17InstructionRetrieval":-0.2,"News21InstructionRetrieval":-2.03,"Robust04InstructionRetrieval":-5.81} +{"Rank":15,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":-2.76,"Core17InstructionRetrieval":0.69,"News21InstructionRetrieval":-0.9,"Robust04InstructionRetrieval":-8.08} +{"Rank":16,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":-3.45,"Core17InstructionRetrieval":0.09,"News21InstructionRetrieval":-0.86,"Robust04InstructionRetrieval":-9.59} +{"Rank":17,"Model":"monobert-large-msmarco<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":-3.47,"Core17InstructionRetrieval":-0.24,"News21InstructionRetrieval":-0.8,"Robust04InstructionRetrieval":-9.36} +{"Rank":18,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.81,"Core17InstructionRetrieval":-2.48,"News21InstructionRetrieval":-2.83,"Robust04InstructionRetrieval":-6.12} +{"Rank":19,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.88,"Core17InstructionRetrieval":-2.9,"News21InstructionRetrieval":-2.0,"Robust04InstructionRetrieval":-6.73} +{"Rank":20,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-4.43,"Core17InstructionRetrieval":-1.09,"News21InstructionRetrieval":-1.78,"Robust04InstructionRetrieval":-10.42} +{"Rank":21,"Model":"tart-dual-contriever-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-5.0,"Core17InstructionRetrieval":-3.04,"News21InstructionRetrieval":-2.98,"Robust04InstructionRetrieval":-8.98} diff --git a/all_data_tasks/25/default.jsonl b/all_data_tasks/25/default.jsonl index 568f75d3a1f178a1c9c1c8fcd0c9391e2fc7ce9e..aa49de0a498f7764414a3ec0e47940e1b74beeda 100644 --- a/all_data_tasks/25/default.jsonl +++ b/all_data_tasks/25/default.jsonl @@ -1,11 +1,17 @@ -{"index":1,"Rank":1,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":65.39,"AILACasedocs":44.56,"AILAStatutes":45.51,"GerDaLIRSmall":44.91,"LeCaRDv2":72.75,"LegalBenchConsumerContractsQA":83.27,"LegalBenchCorporateLobbying":95.66,"LegalQuAD":67.47,"LegalSummarization":68.96} -{"index":5,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":59.77,"AILACasedocs":38.76,"AILAStatutes":38.07,"GerDaLIRSmall":37.18,"LeCaRDv2":68.56,"LegalBenchConsumerContractsQA":75.46,"LegalBenchCorporateLobbying":94.01,"LegalQuAD":59.64,"LegalSummarization":66.51} -{"index":10,"Rank":3,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.22,"AILACasedocs":39.0,"AILAStatutes":41.31,"GerDaLIRSmall":32.77,"LeCaRDv2":57.2,"LegalBenchConsumerContractsQA":79.39,"LegalBenchCorporateLobbying":95.09,"LegalQuAD":57.47,"LegalSummarization":71.55} -{"index":0,"Rank":4,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.43,"AILACasedocs":38.2,"AILAStatutes":44.81,"GerDaLIRSmall":17.85,"LeCaRDv2":61.12,"LegalBenchConsumerContractsQA":80.8,"LegalBenchCorporateLobbying":94.11,"LegalQuAD":47.17,"LegalSummarization":67.39} -{"index":3,"Rank":5,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.04,"AILACasedocs":31.54,"AILAStatutes":27.15,"GerDaLIRSmall":6.05,"LeCaRDv2":21.02,"LegalBenchConsumerContractsQA":77.12,"LegalBenchCorporateLobbying":93.68,"LegalQuAD":26.08,"LegalSummarization":61.7} -{"index":2,"Rank":6,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":39.22,"AILACasedocs":25.15,"AILAStatutes":20.74,"GerDaLIRSmall":3.96,"LeCaRDv2":22.68,"LegalBenchConsumerContractsQA":73.52,"LegalBenchCorporateLobbying":91.51,"LegalQuAD":16.22,"LegalSummarization":59.99} -{"index":4,"Rank":7,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AILACasedocs":35.31,"AILAStatutes":41.8,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":82.1,"LegalBenchCorporateLobbying":95.0,"LegalQuAD":"","LegalSummarization":70.64} -{"index":6,"Rank":8,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AILACasedocs":16.8,"AILAStatutes":20.71,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":60.21,"LegalBenchCorporateLobbying":88.69,"LegalQuAD":"","LegalSummarization":57.43} -{"index":7,"Rank":9,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AILACasedocs":19.72,"AILAStatutes":20.52,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":65.6,"LegalBenchCorporateLobbying":86.41,"LegalQuAD":"","LegalSummarization":59.0} -{"index":8,"Rank":10,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AILACasedocs":22.51,"AILAStatutes":21.27,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":75.25,"LegalBenchCorporateLobbying":89.04,"LegalQuAD":"","LegalSummarization":58.55} -{"index":9,"Rank":11,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":"","AILAStatutes":"","GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":"","LegalBenchCorporateLobbying":"","LegalQuAD":"","LegalSummarization":""} 
+{"Rank":1,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":38.2,"AILAStatutes":44.81,"GerDaLIRSmall":17.85,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":61.12,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":80.8,"LegalBenchCorporateLobbying":94.11,"LegalQuAD":47.17,"LegalQuAD (deu-Latn)":"","LegalSummarization":67.39} +{"Rank":2,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":44.56,"AILAStatutes":45.51,"GerDaLIRSmall":44.91,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":72.75,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":83.27,"LegalBenchCorporateLobbying":95.66,"LegalQuAD":67.47,"LegalQuAD (deu-Latn)":"","LegalSummarization":68.96} +{"Rank":3,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":"","AILACasedocs":25.15,"AILAStatutes":20.74,"GerDaLIRSmall":3.96,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":22.68,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":73.52,"LegalBenchCorporateLobbying":91.51,"LegalQuAD":16.22,"LegalQuAD (deu-Latn)":"","LegalSummarization":59.99} +{"Rank":4,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":31.54,"AILAStatutes":27.15,"GerDaLIRSmall":6.05,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":21.02,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":77.12,"LegalBenchCorporateLobbying":93.68,"LegalQuAD":26.08,"LegalQuAD (deu-Latn)":"","LegalSummarization":61.7} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AILACasedocs":35.31,"AILAStatutes":41.8,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":20.61,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":64.05,"LegalBenchConsumerContractsQA":82.1,"LegalBenchCorporateLobbying":95.0,"LegalQuAD":"","LegalQuAD (deu-Latn)":44.18,"LegalSummarization":70.64} +{"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AILACasedocs":38.76,"AILAStatutes":38.07,"GerDaLIRSmall":37.18,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":68.56,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":75.46,"LegalBenchCorporateLobbying":94.01,"LegalQuAD":59.64,"LegalQuAD (deu-Latn)":"","LegalSummarization":66.51} +{"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AILACasedocs":26.05,"AILAStatutes":20.37,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":15.3,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":59.0,"LegalBenchConsumerContractsQA":69.02,"LegalBenchCorporateLobbying":88.97,"LegalQuAD":"","LegalQuAD (deu-Latn)":47.85,"LegalSummarization":61.69} +{"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AILACasedocs":26.43,"AILAStatutes":20.84,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":15.72,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":55.83,"LegalBenchConsumerContractsQA":73.3,"LegalBenchCorporateLobbying":89.72,"LegalQuAD":"","LegalQuAD (deu-Latn)":43.17,"LegalSummarization":62.1} +{"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AILACasedocs":23.43,"AILAStatutes":19.01,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":14.81,"LeCaRDv2":"","LeCaRDv2 
(zho-Hans)":61.58,"LegalBenchConsumerContractsQA":66.98,"LegalBenchCorporateLobbying":89.47,"LegalQuAD":"","LegalQuAD (deu-Latn)":47.8,"LegalSummarization":55.76} +{"Rank":10,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AILACasedocs":17.67,"AILAStatutes":16.72,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":4.59,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":24.68,"LegalBenchConsumerContractsQA":54.66,"LegalBenchCorporateLobbying":69.39,"LegalQuAD":"","LegalQuAD (deu-Latn)":16.64,"LegalSummarization":53.89} +{"Rank":11,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AILACasedocs":16.8,"AILAStatutes":20.71,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":1.35,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":18.77,"LegalBenchConsumerContractsQA":60.21,"LegalBenchCorporateLobbying":88.69,"LegalQuAD":"","LegalQuAD (deu-Latn)":7.44,"LegalSummarization":57.43} +{"Rank":12,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AILACasedocs":19.72,"AILAStatutes":20.52,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":2.41,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":17.5,"LegalBenchConsumerContractsQA":65.6,"LegalBenchCorporateLobbying":86.41,"LegalQuAD":"","LegalQuAD (deu-Latn)":11.81,"LegalSummarization":59.0} +{"Rank":13,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AILACasedocs":22.51,"AILAStatutes":21.27,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":3.78,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":18.09,"LegalBenchConsumerContractsQA":75.25,"LegalBenchCorporateLobbying":89.04,"LegalQuAD":"","LegalQuAD (deu-Latn)":10.67,"LegalSummarization":58.55} +{"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AILACasedocs":13.66,"AILAStatutes":20.52,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":2.62,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":32.03,"LegalBenchConsumerContractsQA":49.81,"LegalBenchCorporateLobbying":88.51,"LegalQuAD":"","LegalQuAD (deu-Latn)":13.31,"LegalSummarization":54.97} +{"Rank":15,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AILACasedocs":17.45,"AILAStatutes":22.24,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":3.0,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":33.91,"LegalBenchConsumerContractsQA":52.37,"LegalBenchCorporateLobbying":87.62,"LegalQuAD":"","LegalQuAD (deu-Latn)":17.8,"LegalSummarization":56.8} +{"Rank":16,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","AILACasedocs":7.43,"AILAStatutes":13.62,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":"","LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":"","LegalBenchCorporateLobbying":"","LegalQuAD":"","LegalQuAD (deu-Latn)":"","LegalSummarization":""} +{"Rank":17,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":39.0,"AILAStatutes":41.31,"GerDaLIRSmall":32.77,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":57.2,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":79.39,"LegalBenchCorporateLobbying":95.09,"LegalQuAD":57.47,"LegalQuAD (deu-Latn)":"","LegalSummarization":71.55} diff --git a/all_data_tasks/26/default.jsonl b/all_data_tasks/26/default.jsonl index 
31338878568452071d05c4531f7edebf3cbf0294..9deef24bd1b9ecf7a866b50b9aac3c769c1735c3 100644 --- a/all_data_tasks/26/default.jsonl +++ b/all_data_tasks/26/default.jsonl @@ -1,13 +1,19 @@ -{"index":1,"Rank":1,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.17,"LEMBNarrativeQARetrieval":64.69,"LEMBNeedleRetrieval":75.25,"LEMBPasskeyRetrieval":97.0,"LEMBQMSumRetrieval":51.49,"LEMBSummScreenFDRetrieval":99.11,"LEMBWikimQARetrieval":87.49} -{"index":0,"Rank":2,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.85,"LEMBNarrativeQARetrieval":55.78,"LEMBNeedleRetrieval":80.5,"LEMBPasskeyRetrieval":93.75,"LEMBQMSumRetrieval":57.26,"LEMBSummScreenFDRetrieval":98.72,"LEMBWikimQARetrieval":87.08} -{"index":6,"Rank":3,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.4,"LEMBNarrativeQARetrieval":44.62,"LEMBNeedleRetrieval":48.25,"LEMBPasskeyRetrieval":71.0,"LEMBQMSumRetrieval":43.63,"LEMBSummScreenFDRetrieval":96.82,"LEMBWikimQARetrieval":82.11} -{"index":7,"Rank":4,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":58.12,"LEMBNarrativeQARetrieval":37.89,"LEMBNeedleRetrieval":54.25,"LEMBPasskeyRetrieval":50.25,"LEMBQMSumRetrieval":38.87,"LEMBSummScreenFDRetrieval":93.48,"LEMBWikimQARetrieval":73.99} -{"index":2,"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":56.56,"LEMBNarrativeQARetrieval":45.76,"LEMBNeedleRetrieval":40.25,"LEMBPasskeyRetrieval":46.0,"LEMBQMSumRetrieval":35.54,"LEMBSummScreenFDRetrieval":94.09,"LEMBWikimQARetrieval":77.73} -{"index":4,"Rank":6,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.52,"LEMBNarrativeQARetrieval":30.35,"LEMBNeedleRetrieval":41.5,"LEMBPasskeyRetrieval":67.25,"LEMBQMSumRetrieval":35.6,"LEMBSummScreenFDRetrieval":95.23,"LEMBWikimQARetrieval":69.19} -{"index":8,"Rank":7,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":54.81,"LEMBNarrativeQARetrieval":41.23,"LEMBNeedleRetrieval":39.5,"LEMBPasskeyRetrieval":44.75,"LEMBQMSumRetrieval":36.65,"LEMBSummScreenFDRetrieval":92.97,"LEMBWikimQARetrieval":73.75} -{"index":12,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.3,"LEMBNarrativeQARetrieval":44.09,"LEMBNeedleRetrieval":29.25,"LEMBPasskeyRetrieval":63.0,"LEMBQMSumRetrieval":32.49,"LEMBSummScreenFDRetrieval":84.8,"LEMBWikimQARetrieval":54.16} -{"index":3,"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":47.1,"LEMBNarrativeQARetrieval":41.46,"LEMBNeedleRetrieval":33.25,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":30.32,"LEMBSummScreenFDRetrieval":78.49,"LEMBWikimQARetrieval":60.8} -{"index":5,"Rank":10,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.24,"LEMBNarrativeQARetrieval":25.31,"LEMBNeedleRetrieval":28.5,"LEMBPasskeyRetrieval":33.25,"LEMBQMSumRetrieval":23.83,"LEMBSummScreenFDRetrieval":74.67,"LEMBWikimQARetrieval":55.85} -{"index":11,"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":31.12,"LEMBNarrativeQARetrieval":19.34,"LEMBNeedleRetrieval":16.0,"LEMBPasskeyRetrieval":24.5,"LEMBQMSumRetrieval":21.54,"LEMBSummScreenFDRetrieval":60.43,"LEMBWikimQARetrieval":44.92} -{"index":10,"Rank":12,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.81,"LEMBNarrativeQARetrieval":18.27,"LEMBNeedleRetrieval":20.0,"LEMBPasskeyRetrieval":23.25,"LEMBQMSumRetrieval":16.32,"LEMBSummScreenFDRetrieval":54.8,"LEMBWikimQARetrieval":46.23} -{"index":9,"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.26,"LEMBNarrativeQARetrieval":19.64,"LEMBNeedleRetrieval":12.25,"LEMBPasskeyRetrieval":14.75,"LEMBQMSumRetrieval":13.08,"LEMBSummScreenFDRetrieval":46.98,"LEMBWikimQARetrieval":44.88} +{"Rank":1,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.17,"LEMBNarrativeQARetrieval":64.69,"LEMBNeedleRetrieval":75.25,"LEMBPasskeyRetrieval":97.0,"LEMBQMSumRetrieval":51.49,"LEMBSummScreenFDRetrieval":99.11,"LEMBWikimQARetrieval":87.49} +{"Rank":2,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.85,"LEMBNarrativeQARetrieval":55.78,"LEMBNeedleRetrieval":80.5,"LEMBPasskeyRetrieval":93.75,"LEMBQMSumRetrieval":57.26,"LEMBSummScreenFDRetrieval":98.72,"LEMBWikimQARetrieval":87.08} +{"Rank":3,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.4,"LEMBNarrativeQARetrieval":44.62,"LEMBNeedleRetrieval":48.25,"LEMBPasskeyRetrieval":71.0,"LEMBQMSumRetrieval":43.63,"LEMBSummScreenFDRetrieval":96.82,"LEMBWikimQARetrieval":82.11} +{"Rank":4,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":58.12,"LEMBNarrativeQARetrieval":37.89,"LEMBNeedleRetrieval":54.25,"LEMBPasskeyRetrieval":50.25,"LEMBQMSumRetrieval":38.87,"LEMBSummScreenFDRetrieval":93.48,"LEMBWikimQARetrieval":73.99} +{"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":56.56,"LEMBNarrativeQARetrieval":45.76,"LEMBNeedleRetrieval":40.25,"LEMBPasskeyRetrieval":46.0,"LEMBQMSumRetrieval":35.54,"LEMBSummScreenFDRetrieval":94.09,"LEMBWikimQARetrieval":77.73} +{"Rank":6,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.52,"LEMBNarrativeQARetrieval":30.35,"LEMBNeedleRetrieval":41.5,"LEMBPasskeyRetrieval":67.25,"LEMBQMSumRetrieval":35.6,"LEMBSummScreenFDRetrieval":95.23,"LEMBWikimQARetrieval":69.19} +{"Rank":7,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":54.81,"LEMBNarrativeQARetrieval":41.23,"LEMBNeedleRetrieval":39.5,"LEMBPasskeyRetrieval":44.75,"LEMBQMSumRetrieval":36.65,"LEMBSummScreenFDRetrieval":92.97,"LEMBWikimQARetrieval":73.75} +{"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.3,"LEMBNarrativeQARetrieval":44.09,"LEMBNeedleRetrieval":29.25,"LEMBPasskeyRetrieval":63.0,"LEMBQMSumRetrieval":32.49,"LEMBSummScreenFDRetrieval":84.8,"LEMBWikimQARetrieval":54.16} +{"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, 
fp32)":26.97,"Average":47.1,"LEMBNarrativeQARetrieval":41.46,"LEMBNeedleRetrieval":33.25,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":30.32,"LEMBSummScreenFDRetrieval":78.49,"LEMBWikimQARetrieval":60.8} +{"Rank":10,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":40.54,"LEMBNarrativeQARetrieval":23.6,"LEMBNeedleRetrieval":32.0,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":25.16,"LEMBSummScreenFDRetrieval":68.21,"LEMBWikimQARetrieval":56.04} +{"Rank":11,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":40.44,"LEMBNarrativeQARetrieval":24.22,"LEMBNeedleRetrieval":28.0,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":24.26,"LEMBSummScreenFDRetrieval":71.12,"LEMBWikimQARetrieval":56.8} +{"Rank":12,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.24,"LEMBNarrativeQARetrieval":25.31,"LEMBNeedleRetrieval":28.5,"LEMBPasskeyRetrieval":33.25,"LEMBQMSumRetrieval":23.83,"LEMBSummScreenFDRetrieval":74.67,"LEMBWikimQARetrieval":55.85} +{"Rank":13,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.83,"LEMBNarrativeQARetrieval":22.6,"LEMBNeedleRetrieval":30.75,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":21.51,"LEMBSummScreenFDRetrieval":62.75,"LEMBWikimQARetrieval":57.13} +{"Rank":14,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":31.12,"LEMBNarrativeQARetrieval":19.34,"LEMBNeedleRetrieval":16.0,"LEMBPasskeyRetrieval":24.5,"LEMBQMSumRetrieval":21.54,"LEMBSummScreenFDRetrieval":60.43,"LEMBWikimQARetrieval":44.92} +{"Rank":15,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.81,"LEMBNarrativeQARetrieval":18.27,"LEMBNeedleRetrieval":20.0,"LEMBPasskeyRetrieval":23.25,"LEMBQMSumRetrieval":16.32,"LEMBSummScreenFDRetrieval":54.8,"LEMBWikimQARetrieval":46.23} +{"Rank":16,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.26,"LEMBNarrativeQARetrieval":19.64,"LEMBNeedleRetrieval":12.25,"LEMBPasskeyRetrieval":14.75,"LEMBQMSumRetrieval":13.08,"LEMBSummScreenFDRetrieval":46.98,"LEMBWikimQARetrieval":44.88} +{"Rank":17,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":21.98,"LEMBNarrativeQARetrieval":11.45,"LEMBNeedleRetrieval":17.5,"LEMBPasskeyRetrieval":20.25,"LEMBQMSumRetrieval":14.07,"LEMBSummScreenFDRetrieval":40.52,"LEMBWikimQARetrieval":28.1} +{"Rank":18,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":21.67,"LEMBNarrativeQARetrieval":16.02,"LEMBNeedleRetrieval":14.0,"LEMBPasskeyRetrieval":7.75,"LEMBQMSumRetrieval":12.23,"LEMBSummScreenFDRetrieval":41.15,"LEMBWikimQARetrieval":38.86} +{"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.92,"LEMBNarrativeQARetrieval":13.82,"LEMBNeedleRetrieval":13.5,"LEMBPasskeyRetrieval":8.25,"LEMBQMSumRetrieval":11.02,"LEMBSummScreenFDRetrieval":38.12,"LEMBWikimQARetrieval":40.84} diff --git a/all_data_tasks/27/default.jsonl b/all_data_tasks/27/default.jsonl index e23a313c8ee437cd8988d65b247d2ca2c9148e37..373773d0dd55ede976900a238f8cb2df2f848503 100644 --- a/all_data_tasks/27/default.jsonl +++ 
b/all_data_tasks/27/default.jsonl @@ -1,19 +1,14 @@ -{"index":5,"Rank":1,"Model":"gbert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":32.34,"BlurbsClusteringP2P":39.3,"BlurbsClusteringS2S":13.38,"TenKGnadClusteringP2P":41.69,"TenKGnadClusteringS2S":34.97} -{"index":15,"Rank":2,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":29.74,"BlurbsClusteringP2P":39.91,"BlurbsClusteringS2S":15.94,"TenKGnadClusteringP2P":43.43,"TenKGnadClusteringS2S":19.69} -{"index":9,"Rank":3,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.55,"BlurbsClusteringP2P":35.49,"BlurbsClusteringS2S":16.17,"TenKGnadClusteringP2P":42.84,"TenKGnadClusteringS2S":23.69} -{"index":3,"Rank":4,"Model":"German_Semantic_STS_V2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.5,"BlurbsClusteringP2P":38.68,"BlurbsClusteringS2S":17.62,"TenKGnadClusteringP2P":38.0,"TenKGnadClusteringS2S":23.71} -{"index":14,"Rank":5,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":27.49,"BlurbsClusteringP2P":35.33,"BlurbsClusteringS2S":13.27,"TenKGnadClusteringP2P":44.11,"TenKGnadClusteringS2S":17.26} -{"index":12,"Rank":6,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":27.04,"BlurbsClusteringP2P":34.38,"BlurbsClusteringS2S":15.81,"TenKGnadClusteringP2P":35.96,"TenKGnadClusteringS2S":22.0} -{"index":4,"Rank":7,"Model":"gbert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":27.0,"BlurbsClusteringP2P":35.36,"BlurbsClusteringS2S":11.27,"TenKGnadClusteringP2P":37.16,"TenKGnadClusteringS2S":24.23} -{"index":16,"Rank":8,"Model":"use-cmlm-multilingual<\/a>","Model Size (Million Parameters)":472,"Memory Usage (GB, fp32)":1.76,"Average":26.9,"BlurbsClusteringP2P":29.63,"BlurbsClusteringS2S":15.24,"TenKGnadClusteringP2P":37.1,"TenKGnadClusteringS2S":25.64} -{"index":11,"Rank":9,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.3,"BlurbsClusteringP2P":32.46,"BlurbsClusteringS2S":14.33,"TenKGnadClusteringP2P":36.13,"TenKGnadClusteringS2S":22.26} -{"index":13,"Rank":10,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.29,"BlurbsClusteringP2P":30.59,"BlurbsClusteringS2S":11.57,"TenKGnadClusteringP2P":44.88,"TenKGnadClusteringS2S":18.11} -{"index":8,"Rank":11,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.71,"BlurbsClusteringP2P":27.0,"BlurbsClusteringS2S":14.85,"TenKGnadClusteringP2P":21.83,"TenKGnadClusteringS2S":27.16} -{"index":17,"Rank":12,"Model":"gottbert-base<\/a>","Model Size (Million Parameters)":127,"Memory Usage (GB, fp32)":0.47,"Average":21.46,"BlurbsClusteringP2P":34.49,"BlurbsClusteringS2S":8.37,"TenKGnadClusteringP2P":33.66,"TenKGnadClusteringS2S":9.34} -{"index":2,"Rank":13,"Model":"cross-en-de-roberta-sentence-transformer<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":19.49,"BlurbsClusteringP2P":30.82,"BlurbsClusteringS2S":12.69,"TenKGnadClusteringP2P":23.5,"TenKGnadClusteringS2S":10.94} -{"index":18,"Rank":14,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, 
fp32)":2.09,"Average":18.94,"BlurbsClusteringP2P":29.84,"BlurbsClusteringS2S":7.29,"TenKGnadClusteringP2P":32.46,"TenKGnadClusteringS2S":6.16} -{"index":0,"Rank":15,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.62,"BlurbsClusteringP2P":17.47,"BlurbsClusteringS2S":7.96,"TenKGnadClusteringP2P":29.79,"TenKGnadClusteringS2S":11.27} -{"index":10,"Rank":16,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":10.03,"BlurbsClusteringP2P":11.37,"BlurbsClusteringS2S":8.01,"TenKGnadClusteringP2P":15.89,"TenKGnadClusteringS2S":4.84} -{"index":7,"Rank":17,"Model":"gelectra-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":9.23,"BlurbsClusteringP2P":13.96,"BlurbsClusteringS2S":7.57,"TenKGnadClusteringP2P":11.49,"TenKGnadClusteringS2S":3.91} -{"index":6,"Rank":18,"Model":"gelectra-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":7.73,"BlurbsClusteringP2P":10.06,"BlurbsClusteringS2S":7.74,"TenKGnadClusteringP2P":9.02,"TenKGnadClusteringS2S":4.11} -{"index":1,"Rank":19,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BlurbsClusteringP2P":"","BlurbsClusteringS2S":8.0,"TenKGnadClusteringP2P":"","TenKGnadClusteringS2S":""} +{"Rank":1,"Model":"gbert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":32.34,"BlurbsClusteringP2P":39.3,"BlurbsClusteringS2S":13.38,"TenKGnadClusteringP2P":41.69,"TenKGnadClusteringS2S":34.97} +{"Rank":2,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":29.74,"BlurbsClusteringP2P":39.91,"BlurbsClusteringS2S":15.94,"TenKGnadClusteringP2P":43.43,"TenKGnadClusteringS2S":19.69} +{"Rank":3,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":27.49,"BlurbsClusteringP2P":35.33,"BlurbsClusteringS2S":13.27,"TenKGnadClusteringP2P":44.11,"TenKGnadClusteringS2S":17.26} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":27.04,"BlurbsClusteringP2P":34.38,"BlurbsClusteringS2S":15.81,"TenKGnadClusteringP2P":35.96,"TenKGnadClusteringS2S":22.0} +{"Rank":5,"Model":"gbert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":27.0,"BlurbsClusteringP2P":35.36,"BlurbsClusteringS2S":11.27,"TenKGnadClusteringP2P":37.16,"TenKGnadClusteringS2S":24.23} +{"Rank":6,"Model":"use-cmlm-multilingual<\/a>","Model Size (Million Parameters)":472,"Memory Usage (GB, fp32)":1.76,"Average":26.9,"BlurbsClusteringP2P":29.63,"BlurbsClusteringS2S":15.24,"TenKGnadClusteringP2P":37.1,"TenKGnadClusteringS2S":25.64} +{"Rank":7,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.3,"BlurbsClusteringP2P":32.46,"BlurbsClusteringS2S":14.33,"TenKGnadClusteringP2P":36.13,"TenKGnadClusteringS2S":22.26} +{"Rank":8,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.29,"BlurbsClusteringP2P":30.59,"BlurbsClusteringS2S":11.57,"TenKGnadClusteringP2P":44.88,"TenKGnadClusteringS2S":18.11} +{"Rank":9,"Model":"gottbert-base<\/a>","Model Size (Million Parameters)":127,"Memory Usage (GB, 
fp32)":0.47,"Average":21.46,"BlurbsClusteringP2P":34.49,"BlurbsClusteringS2S":8.37,"TenKGnadClusteringP2P":33.66,"TenKGnadClusteringS2S":9.34} +{"Rank":10,"Model":"cross-en-de-roberta-sentence-transformer<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":19.49,"BlurbsClusteringP2P":30.82,"BlurbsClusteringS2S":12.69,"TenKGnadClusteringP2P":23.5,"TenKGnadClusteringS2S":10.94} +{"Rank":11,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":18.94,"BlurbsClusteringP2P":29.84,"BlurbsClusteringS2S":7.29,"TenKGnadClusteringP2P":32.46,"TenKGnadClusteringS2S":6.16} +{"Rank":12,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":10.03,"BlurbsClusteringP2P":11.37,"BlurbsClusteringS2S":8.01,"TenKGnadClusteringP2P":15.89,"TenKGnadClusteringS2S":4.84} +{"Rank":13,"Model":"gelectra-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":9.23,"BlurbsClusteringP2P":13.96,"BlurbsClusteringS2S":7.57,"TenKGnadClusteringP2P":11.49,"TenKGnadClusteringS2S":3.91} +{"Rank":14,"Model":"gelectra-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":7.73,"BlurbsClusteringP2P":10.06,"BlurbsClusteringS2S":7.74,"TenKGnadClusteringP2P":9.02,"TenKGnadClusteringS2S":4.11} diff --git a/all_data_tasks/28/default.jsonl b/all_data_tasks/28/default.jsonl index e6a0f1811b767f45750f0c6da521a89a1455e771..04b50a7f4e9e8f782683ab712b8b2666368f8315 100644 --- a/all_data_tasks/28/default.jsonl +++ b/all_data_tasks/28/default.jsonl @@ -1,53 +1,23 @@ -{"index":3,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.99,"AllegroReviews":65.0,"CBD":84.13,"MassiveIntentClassification (pl)":79.41,"MassiveScenarioClassification (pl)":81.93,"PAC":67.24,"PolEmo2.0-IN":90.42,"PolEmo2.0-OUT":77.77} -{"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.84,"AllegroReviews":67.14,"CBD":78.03,"MassiveIntentClassification (pl)":80.75,"MassiveScenarioClassification (pl)":85.54,"PAC":69.04,"PolEmo2.0-IN":89.39,"PolEmo2.0-OUT":74.98} -{"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.84,"AllegroReviews":67.14,"CBD":78.03,"MassiveIntentClassification (pl)":80.75,"MassiveScenarioClassification (pl)":85.54,"PAC":69.04,"PolEmo2.0-IN":89.39,"PolEmo2.0-OUT":74.98} -{"index":2,"Rank":4,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":77.84,"AllegroReviews":67.14,"CBD":78.03,"MassiveIntentClassification (pl)":80.75,"MassiveScenarioClassification (pl)":85.54,"PAC":69.04,"PolEmo2.0-IN":89.39,"PolEmo2.0-OUT":74.98} -{"index":1,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.29,"AllegroReviews":63.98,"CBD":68.56,"MassiveIntentClassification (pl)":73.55,"MassiveScenarioClassification (pl)":76.37,"PAC":69.04,"PolEmo2.0-IN":86.16,"PolEmo2.0-OUT":68.4} -{"index":33,"Rank":6,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.29,"AllegroReviews":63.98,"CBD":68.56,"MassiveIntentClassification (pl)":73.55,"MassiveScenarioClassification 
(pl)":76.37,"PAC":69.04,"PolEmo2.0-IN":86.16,"PolEmo2.0-OUT":68.4} -{"index":39,"Rank":7,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.39,"AllegroReviews":47.49,"CBD":69.33,"MassiveIntentClassification (pl)":74.81,"MassiveScenarioClassification (pl)":77.84,"PAC":64.69,"PolEmo2.0-IN":76.84,"PolEmo2.0-OUT":53.72} -{"index":26,"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.82,"AllegroReviews":41.14,"CBD":69.9,"MassiveIntentClassification (pl)":65.07,"MassiveScenarioClassification (pl)":69.82,"PAC":70.37,"PolEmo2.0-IN":77.06,"PolEmo2.0-OUT":53.38} -{"index":38,"Rank":9,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.92,"AllegroReviews":40.26,"CBD":68.11,"MassiveIntentClassification (pl)":72.31,"MassiveScenarioClassification (pl)":75.53,"PAC":65.87,"PolEmo2.0-IN":71.73,"PolEmo2.0-OUT":46.6} -{"index":36,"Rank":10,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.07,"AllegroReviews":37.68,"CBD":66.15,"MassiveIntentClassification (pl)":72.01,"MassiveScenarioClassification (pl)":75.27,"PAC":63.77,"PolEmo2.0-IN":69.46,"PolEmo2.0-OUT":43.14} -{"index":19,"Rank":11,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.44,"AllegroReviews":40.19,"CBD":67.69,"MassiveIntentClassification (pl)":68.2,"MassiveScenarioClassification (pl)":73.97,"PAC":66.55,"PolEmo2.0-IN":68.41,"PolEmo2.0-OUT":38.06} -{"index":35,"Rank":12,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.52,"AllegroReviews":36.38,"CBD":65.54,"MassiveIntentClassification (pl)":70.93,"MassiveScenarioClassification (pl)":74.76,"PAC":64.5,"PolEmo2.0-IN":68.7,"PolEmo2.0-OUT":35.81} -{"index":29,"Rank":13,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":59.26,"AllegroReviews":34.11,"CBD":68.35,"MassiveIntentClassification (pl)":65.53,"MassiveScenarioClassification (pl)":68.51,"PAC":68.4,"PolEmo2.0-IN":64.18,"PolEmo2.0-OUT":45.73} -{"index":25,"Rank":14,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":59.01,"AllegroReviews":40.85,"CBD":62.66,"MassiveIntentClassification (pl)":61.04,"MassiveScenarioClassification (pl)":66.11,"PAC":70.87,"PolEmo2.0-IN":67.66,"PolEmo2.0-OUT":43.91} -{"index":30,"Rank":15,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":58.99,"AllegroReviews":33.35,"CBD":68.51,"MassiveIntentClassification (pl)":66.63,"MassiveScenarioClassification (pl)":69.97,"PAC":66.26,"PolEmo2.0-IN":63.52,"PolEmo2.0-OUT":44.7} -{"index":40,"Rank":16,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":58.55,"AllegroReviews":34.5,"CBD":70.27,"MassiveIntentClassification (pl)":64.81,"MassiveScenarioClassification (pl)":70.01,"PAC":64.6,"PolEmo2.0-IN":67.06,"PolEmo2.0-OUT":38.58} -{"index":37,"Rank":17,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.73,"AllegroReviews":33.03,"CBD":64.24,"MassiveIntentClassification (pl)":67.35,"MassiveScenarioClassification 
(pl)":72.13,"PAC":63.11,"PolEmo2.0-IN":63.96,"PolEmo2.0-OUT":40.32} -{"index":41,"Rank":18,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":57.49,"AllegroReviews":34.55,"CBD":67.48,"MassiveIntentClassification (pl)":65.93,"MassiveScenarioClassification (pl)":71.85,"PAC":63.25,"PolEmo2.0-IN":68.37,"PolEmo2.0-OUT":30.99} -{"index":42,"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":57.39,"AllegroReviews":34.89,"CBD":65.71,"MassiveIntentClassification (pl)":59.71,"MassiveScenarioClassification (pl)":64.58,"PAC":68.11,"PolEmo2.0-IN":64.0,"PolEmo2.0-OUT":44.72} -{"index":28,"Rank":20,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":57.05,"AllegroReviews":37.42,"CBD":63.25,"MassiveIntentClassification (pl)":57.4,"MassiveScenarioClassification (pl)":64.25,"PAC":70.55,"PolEmo2.0-IN":67.35,"PolEmo2.0-OUT":39.13} -{"index":23,"Rank":21,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.34,"AllegroReviews":29.62,"CBD":63.83,"MassiveIntentClassification (pl)":65.86,"MassiveScenarioClassification (pl)":69.99,"PAC":73.87,"PolEmo2.0-IN":52.8,"PolEmo2.0-OUT":38.4} -{"index":52,"Rank":22,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.09,"AllegroReviews":33.86,"CBD":65.0,"MassiveIntentClassification (pl)":64.29,"MassiveScenarioClassification (pl)":68.98,"PAC":63.76,"PolEmo2.0-IN":62.78,"PolEmo2.0-OUT":19.98} -{"index":51,"Rank":23,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":52.18,"AllegroReviews":30.88,"CBD":57.68,"MassiveIntentClassification (pl)":59.43,"MassiveScenarioClassification (pl)":65.04,"PAC":65.76,"PolEmo2.0-IN":57.76,"PolEmo2.0-OUT":28.7} -{"index":48,"Rank":24,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":49.92,"AllegroReviews":28.03,"CBD":60.0,"MassiveIntentClassification (pl)":53.1,"MassiveScenarioClassification (pl)":61.29,"PAC":68.17,"PolEmo2.0-IN":48.84,"PolEmo2.0-OUT":30.0} -{"index":4,"Rank":25,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.66,"AllegroReviews":24.89,"CBD":53.78,"MassiveIntentClassification (pl)":31.77,"MassiveScenarioClassification (pl)":37.49,"PAC":57.14,"PolEmo2.0-IN":40.97,"PolEmo2.0-OUT":17.57} -{"index":0,"Rank":26,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":44.99,"MassiveScenarioClassification (pl)":52.92,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":5,"Rank":27,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":27.62,"MassiveScenarioClassification (pl)":31.6,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":6,"Rank":28,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":33.17,"MassiveScenarioClassification (pl)":36.34,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} 
-{"index":8,"Rank":30,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.23,"CBD":0.5,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.56,"PolEmo2.0-IN":0.37,"PolEmo2.0-OUT":0.3} -{"index":9,"Rank":31,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.21,"CBD":0.54,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.63,"PolEmo2.0-IN":0.34,"PolEmo2.0-OUT":0.28} -{"index":10,"Rank":32,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.22,"CBD":0.51,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.55,"PolEmo2.0-IN":0.39,"PolEmo2.0-OUT":0.29} -{"index":11,"Rank":33,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.26,"CBD":0.55,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.58,"PolEmo2.0-IN":0.36,"PolEmo2.0-OUT":0.29} -{"index":12,"Rank":34,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.22,"CBD":0.52,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.55,"PolEmo2.0-IN":0.35,"PolEmo2.0-OUT":0.3} -{"index":13,"Rank":35,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.27,"CBD":0.58,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.65,"PolEmo2.0-IN":0.53,"PolEmo2.0-OUT":0.23} -{"index":14,"Rank":36,"Model":"2024-06-19_22-27-15<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":21.69,"MassiveScenarioClassification (pl)":26.17,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":15,"Rank":37,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":38.21,"MassiveScenarioClassification (pl)":46.21,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":17,"Rank":39,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":42.2,"MassiveScenarioClassification (pl)":41.89,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":18,"Rank":40,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":42.62,"MassiveScenarioClassification (pl)":41.88,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":20,"Rank":41,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":60.07,"MassiveScenarioClassification (pl)":64.0,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":21,"Rank":42,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification 
(pl)":50.48,"MassiveScenarioClassification (pl)":49.98,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":24,"Rank":43,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":74.85,"MassiveScenarioClassification (pl)":77.37,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":27,"Rank":44,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":73.65,"MassiveScenarioClassification (pl)":76.69,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":31,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":44.47,"MassiveScenarioClassification (pl)":45.6,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":32,"Rank":46,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":41.63,"MassiveScenarioClassification (pl)":41.63,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":43,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":37.63,"MassiveScenarioClassification (pl)":44.72,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":46,"Rank":50,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":35.77,"MassiveScenarioClassification (pl)":36.87,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":49,"Rank":52,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":39.08,"MassiveScenarioClassification (pl)":46.79,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":50,"Rank":53,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":42.64,"MassiveScenarioClassification (pl)":49.97,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":53,"Rank":54,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":34.41,"MassiveScenarioClassification (pl)":42.3,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":54,"Rank":55,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":37.67,"MassiveScenarioClassification (pl)":45.2,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":55,"Rank":56,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":43.95,"MassiveScenarioClassification (pl)":49.87,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":56,"Rank":57,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":26.05,"MassiveScenarioClassification (pl)":30.15,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"index":57,"Rank":58,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":58.91,"MassiveScenarioClassification (pl)":62.55,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":62.36,"AllegroReviews":41.14,"AllegroReviews (pol-Latn)":41.04,"CBD":69.9,"CBD (pol-Latn)":69.84,"PAC":70.37,"PAC (pol-Latn)":70.33,"PolEmo2.0-IN":77.06,"PolEmo2.0-IN (pol-Latn)":77.06,"PolEmo2.0-OUT":53.38,"PolEmo2.0-OUT (pol-Latn)":53.48} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":57.17,"AllegroReviews":40.85,"AllegroReviews (pol-Latn)":40.78,"CBD":62.66,"CBD (pol-Latn)":62.6,"PAC":70.87,"PAC (pol-Latn)":70.87,"PolEmo2.0-IN":67.66,"PolEmo2.0-IN (pol-Latn)":67.59,"PolEmo2.0-OUT":43.91,"PolEmo2.0-OUT (pol-Latn)":43.93} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.53,"AllegroReviews":37.42,"AllegroReviews (pol-Latn)":37.33,"CBD":63.25,"CBD (pol-Latn)":63.33,"PAC":70.55,"PAC (pol-Latn)":70.48,"PolEmo2.0-IN":67.35,"PolEmo2.0-IN (pol-Latn)":67.31,"PolEmo2.0-OUT":39.13,"PolEmo2.0-OUT (pol-Latn)":39.17} +{"Rank":4,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":55.48,"AllegroReviews":34.89,"AllegroReviews (pol-Latn)":34.86,"CBD":65.71,"CBD (pol-Latn)":65.74,"PAC":68.11,"PAC (pol-Latn)":68.09,"PolEmo2.0-IN":64.0,"PolEmo2.0-IN (pol-Latn)":63.91,"PolEmo2.0-OUT":44.72,"PolEmo2.0-OUT (pol-Latn)":44.76} +{"Rank":5,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.07,"AllegroReviews":33.86,"AllegroReviews (pol-Latn)":33.89,"CBD":65.0,"CBD (pol-Latn)":64.97,"PAC":63.76,"PAC (pol-Latn)":63.76,"PolEmo2.0-IN":62.78,"PolEmo2.0-IN (pol-Latn)":62.74,"PolEmo2.0-OUT":19.98,"PolEmo2.0-OUT (pol-Latn)":19.92} +{"Rank":6,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":48.15,"AllegroReviews":30.88,"AllegroReviews (pol-Latn)":30.85,"CBD":57.68,"CBD (pol-Latn)":57.71,"PAC":65.76,"PAC (pol-Latn)":65.75,"PolEmo2.0-IN":57.76,"PolEmo2.0-IN (pol-Latn)":57.76,"PolEmo2.0-OUT":28.7,"PolEmo2.0-OUT (pol-Latn)":28.66} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":37.32,"CBD":null,"CBD (pol-Latn)":70.98,"PAC":null,"PAC (pol-Latn)":68.09,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":66.07,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":32.94} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, 
fp32)":0.47,"Average":null,"AllegroReviews":34.11,"AllegroReviews (pol-Latn)":null,"CBD":68.35,"CBD (pol-Latn)":null,"PAC":68.4,"PAC (pol-Latn)":null,"PolEmo2.0-IN":64.18,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":45.73,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":33.35,"AllegroReviews (pol-Latn)":null,"CBD":68.51,"CBD (pol-Latn)":null,"PAC":66.26,"PAC (pol-Latn)":null,"PolEmo2.0-IN":63.52,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":44.7,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":34.5,"AllegroReviews (pol-Latn)":null,"CBD":70.27,"CBD (pol-Latn)":null,"PAC":64.6,"PAC (pol-Latn)":null,"PolEmo2.0-IN":67.06,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":38.58,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":34.55,"AllegroReviews (pol-Latn)":null,"CBD":67.48,"CBD (pol-Latn)":null,"PAC":63.25,"PAC (pol-Latn)":null,"PolEmo2.0-IN":68.37,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":30.99,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":23.85,"CBD":null,"CBD (pol-Latn)":48.46,"PAC":null,"PAC (pol-Latn)":59.53,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":38.32,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":22.98} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":24.64,"CBD":null,"CBD (pol-Latn)":50.9,"PAC":null,"PAC (pol-Latn)":59.78,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":40.29,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":25.0} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":22.99,"CBD":null,"CBD (pol-Latn)":50.25,"PAC":null,"PAC (pol-Latn)":62.1,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":41.63,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":25.0} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AllegroReviews":28.03,"AllegroReviews (pol-Latn)":null,"CBD":60.0,"CBD (pol-Latn)":null,"PAC":68.17,"PAC (pol-Latn)":null,"PolEmo2.0-IN":48.84,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":30.0,"PolEmo2.0-OUT (pol-Latn)":null} 
+{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} diff --git a/all_data_tasks/29/default.jsonl b/all_data_tasks/29/default.jsonl index fecc7c6b8ec9fa435403623acf3c03196c3ffadf..6d29299e612a33acb2c6393574dc679bba061880 100644 --- a/all_data_tasks/29/default.jsonl +++ b/all_data_tasks/29/default.jsonl @@ -1,24 +1,23 @@ -{"index":2,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"8TagsClustering":51.36} -{"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":51.36} -{"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":51.36} -{"index":1,"Rank":4,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":44.59} -{"index":33,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":44.59} -{"index":26,"Rank":6,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"8TagsClustering":33.88} -{"index":41,"Rank":7,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":33.15} -{"index":38,"Rank":8,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":33.08} -{"index":19,"Rank":9,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":32.85} -{"index":37,"Rank":10,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","8TagsClustering":31.77} -{"index":40,"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.68} -{"index":30,"Rank":12,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.49} -{"index":39,"Rank":13,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":31.16} -{"index":36,"Rank":14,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":30.62} -{"index":35,"Rank":15,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":30.25} -{"index":29,"Rank":16,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":28.15} -{"index":52,"Rank":17,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":25.62} -{"index":25,"Rank":18,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":24.97} -{"index":28,"Rank":19,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.92} -{"index":51,"Rank":20,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.24} -{"index":23,"Rank":21,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":18.79} -{"index":42,"Rank":22,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"8TagsClustering":12.96} -{"index":48,"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"8TagsClustering":12.51} -{"index":4,"Rank":24,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":4.68} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"8TagsClustering":33.88} +{"Rank":2,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":33.15} +{"Rank":3,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.68} +{"Rank":4,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.49} +{"Rank":5,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":28.15} +{"Rank":6,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":25.62} +{"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":24.97} +{"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.92} +{"Rank":9,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.24} 
+{"Rank":10,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"8TagsClustering":12.96} +{"Rank":11,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"8TagsClustering":12.51} +{"Rank":12,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"8TagsClustering":null} +{"Rank":13,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"8TagsClustering":null} +{"Rank":14,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"8TagsClustering":null} +{"Rank":15,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"8TagsClustering":null} +{"Rank":16,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"8TagsClustering":null} +{"Rank":17,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"8TagsClustering":null} +{"Rank":18,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"8TagsClustering":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"8TagsClustering":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"8TagsClustering":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"8TagsClustering":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"8TagsClustering":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"8TagsClustering":null} diff --git a/all_data_tasks/3/default.jsonl b/all_data_tasks/3/default.jsonl index 4c0f2e5523778761b5a51a88bc9accddb3e11dc4..80ffbd841925016e1c5659a97c6e96210c32ad7a 100644 --- a/all_data_tasks/3/default.jsonl +++ b/all_data_tasks/3/default.jsonl @@ -1,199 +1,57 @@ -{"index":51,"Rank":1,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.49,"AskUbuntuDupQuestions":68.45,"MindSmallReranking":31.99,"SciDocsRR":89.26,"StackOverflowDupQuestions":56.26} -{"index":17,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":61.42,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":33.36,"SciDocsRR":89.09,"StackOverflowDupQuestions":55.66} -{"index":126,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.42,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":33.36,"SciDocsRR":89.09,"StackOverflowDupQuestions":55.66} -{"index":205,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.42,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":33.36,"SciDocsRR":89.09,"StackOverflowDupQuestions":55.66} -{"index":138,"Rank":5,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.21,"AskUbuntuDupQuestions":67.33,"MindSmallReranking":33.05,"SciDocsRR":89.2,"StackOverflowDupQuestions":55.25} -{"index":96,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, 
fp32)":26.49,"Average":60.64,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":32.72,"SciDocsRR":86.58,"StackOverflowDupQuestions":55.68} -{"index":215,"Rank":7,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":60.54,"AskUbuntuDupQuestions":67.5,"MindSmallReranking":30.82,"SciDocsRR":87.26,"StackOverflowDupQuestions":56.58} -{"index":219,"Rank":8,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.46,"AskUbuntuDupQuestions":66.73,"MindSmallReranking":32.51,"SciDocsRR":87.03,"StackOverflowDupQuestions":55.55} -{"index":58,"Rank":9,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.29,"AskUbuntuDupQuestions":66.82,"MindSmallReranking":32.06,"SciDocsRR":86.4,"StackOverflowDupQuestions":55.89} -{"index":156,"Rank":10,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.21,"AskUbuntuDupQuestions":66.98,"MindSmallReranking":32.6,"SciDocsRR":86.33,"StackOverflowDupQuestions":54.91} -{"index":139,"Rank":11,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.16,"AskUbuntuDupQuestions":66.15,"MindSmallReranking":33.05,"SciDocsRR":88.44,"StackOverflowDupQuestions":52.99} -{"index":95,"Rank":12,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.14,"AskUbuntuDupQuestions":66.71,"MindSmallReranking":31.26,"SciDocsRR":87.29,"StackOverflowDupQuestions":55.32} -{"index":15,"Rank":13,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":60.13,"AskUbuntuDupQuestions":66.0,"MindSmallReranking":32.71,"SciDocsRR":87.89,"StackOverflowDupQuestions":53.93} -{"index":194,"Rank":14,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.11,"AskUbuntuDupQuestions":65.16,"MindSmallReranking":32.54,"SciDocsRR":87.53,"StackOverflowDupQuestions":55.22} -{"index":133,"Rank":15,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.11,"AskUbuntuDupQuestions":65.16,"MindSmallReranking":32.54,"SciDocsRR":87.53,"StackOverflowDupQuestions":55.22} -{"index":6,"Rank":16,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.09,"AskUbuntuDupQuestions":64.92,"MindSmallReranking":30.97,"SciDocsRR":89.34,"StackOverflowDupQuestions":55.11} -{"index":117,"Rank":17,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.05,"AskUbuntuDupQuestions":64.7,"MindSmallReranking":32.82,"SciDocsRR":87.8,"StackOverflowDupQuestions":54.88} -{"index":186,"Rank":18,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.04,"AskUbuntuDupQuestions":64.46,"MindSmallReranking":32.27,"SciDocsRR":87.56,"StackOverflowDupQuestions":55.85} -{"index":114,"Rank":19,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.03,"AskUbuntuDupQuestions":64.47,"MindSmallReranking":32.06,"SciDocsRR":87.63,"StackOverflowDupQuestions":55.95} -{"index":150,"Rank":20,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":60.03,"AskUbuntuDupQuestions":64.47,"MindSmallReranking":32.06,"SciDocsRR":87.63,"StackOverflowDupQuestions":55.95} -{"index":22,"Rank":21,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":60.03,"AskUbuntuDupQuestions":64.47,"MindSmallReranking":32.06,"SciDocsRR":87.63,"StackOverflowDupQuestions":55.95} -{"index":197,"Rank":22,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.0,"AskUbuntuDupQuestions":64.42,"MindSmallReranking":32.78,"SciDocsRR":87.61,"StackOverflowDupQuestions":55.19} -{"index":261,"Rank":23,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.0,"AskUbuntuDupQuestions":64.42,"MindSmallReranking":32.78,"SciDocsRR":87.61,"StackOverflowDupQuestions":55.19} -{"index":204,"Rank":24,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.98,"AskUbuntuDupQuestions":64.55,"MindSmallReranking":33.94,"SciDocsRR":86.52,"StackOverflowDupQuestions":54.91} -{"index":16,"Rank":25,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.98,"AskUbuntuDupQuestions":64.55,"MindSmallReranking":33.94,"SciDocsRR":86.52,"StackOverflowDupQuestions":54.91} -{"index":53,"Rank":26,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.97,"AskUbuntuDupQuestions":64.29,"MindSmallReranking":32.66,"SciDocsRR":87.65,"StackOverflowDupQuestions":55.28} -{"index":111,"Rank":27,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.88,"AskUbuntuDupQuestions":64.2,"MindSmallReranking":32.51,"SciDocsRR":87.49,"StackOverflowDupQuestions":55.32} -{"index":108,"Rank":28,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.88,"AskUbuntuDupQuestions":64.2,"MindSmallReranking":32.51,"SciDocsRR":87.49,"StackOverflowDupQuestions":55.32} -{"index":165,"Rank":29,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.88,"AskUbuntuDupQuestions":64.2,"MindSmallReranking":32.51,"SciDocsRR":87.49,"StackOverflowDupQuestions":55.32} -{"index":170,"Rank":30,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.86,"AskUbuntuDupQuestions":64.32,"MindSmallReranking":32.27,"SciDocsRR":87.47,"StackOverflowDupQuestions":55.4} -{"index":21,"Rank":31,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.86,"AskUbuntuDupQuestions":65.15,"MindSmallReranking":30.6,"SciDocsRR":86.96,"StackOverflowDupQuestions":56.71} -{"index":43,"Rank":32,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.8,"AskUbuntuDupQuestions":65.6,"MindSmallReranking":32.84,"SciDocsRR":86.43,"StackOverflowDupQuestions":54.33} -{"index":8,"Rank":33,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.74,"AskUbuntuDupQuestions":65.77,"MindSmallReranking":31.69,"SciDocsRR":87.03,"StackOverflowDupQuestions":54.49} -{"index":23,"Rank":34,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":59.72,"AskUbuntuDupQuestions":64.59,"MindSmallReranking":31.79,"SciDocsRR":87.6,"StackOverflowDupQuestions":54.9} -{"index":62,"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":59.68,"AskUbuntuDupQuestions":65.19,"MindSmallReranking":32.67,"SciDocsRR":86.05,"StackOverflowDupQuestions":54.82} -{"index":115,"Rank":36,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.37,"AskUbuntuDupQuestions":62.69,"MindSmallReranking":32.36,"SciDocsRR":87.72,"StackOverflowDupQuestions":54.72} -{"index":230,"Rank":37,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":59.36,"AskUbuntuDupQuestions":65.85,"MindSmallReranking":30.97,"SciDocsRR":88.65,"StackOverflowDupQuestions":51.98} -{"index":283,"Rank":38,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.16,"AskUbuntuDupQuestions":65.03,"MindSmallReranking":29.86,"SciDocsRR":86.66,"StackOverflowDupQuestions":55.08} -{"index":253,"Rank":39,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.13,"AskUbuntuDupQuestions":63.06,"MindSmallReranking":32.63,"SciDocsRR":87.2,"StackOverflowDupQuestions":53.63} -{"index":198,"Rank":40,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.94,"AskUbuntuDupQuestions":62.39,"MindSmallReranking":31.89,"SciDocsRR":87.05,"StackOverflowDupQuestions":54.45} -{"index":193,"Rank":41,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.94,"AskUbuntuDupQuestions":63.77,"MindSmallReranking":31.85,"SciDocsRR":86.98,"StackOverflowDupQuestions":53.16} -{"index":1,"Rank":42,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":58.9,"AskUbuntuDupQuestions":64.4,"MindSmallReranking":33.07,"SciDocsRR":83.59,"StackOverflowDupQuestions":54.56} -{"index":120,"Rank":43,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"index":20,"Rank":44,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"index":179,"Rank":45,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"index":181,"Rank":46,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"index":182,"Rank":47,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} 
-{"index":180,"Rank":48,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"index":151,"Rank":49,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.78,"AskUbuntuDupQuestions":62.72,"MindSmallReranking":31.91,"SciDocsRR":86.66,"StackOverflowDupQuestions":53.81} -{"index":137,"Rank":50,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.78,"AskUbuntuDupQuestions":62.72,"MindSmallReranking":31.91,"SciDocsRR":86.66,"StackOverflowDupQuestions":53.81} -{"index":252,"Rank":51,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.61,"AskUbuntuDupQuestions":61.8,"MindSmallReranking":32.54,"SciDocsRR":87.08,"StackOverflowDupQuestions":53.01} -{"index":161,"Rank":52,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.58,"AskUbuntuDupQuestions":63.89,"MindSmallReranking":33.09,"SciDocsRR":85.87,"StackOverflowDupQuestions":51.45} -{"index":118,"Rank":53,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.56,"AskUbuntuDupQuestions":62.33,"MindSmallReranking":32.36,"SciDocsRR":86.48,"StackOverflowDupQuestions":53.06} -{"index":19,"Rank":54,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.5,"AskUbuntuDupQuestions":63.13,"MindSmallReranking":31.46,"SciDocsRR":86.93,"StackOverflowDupQuestions":52.48} -{"index":228,"Rank":55,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":58.44,"AskUbuntuDupQuestions":64.06,"MindSmallReranking":31.02,"SciDocsRR":87.2,"StackOverflowDupQuestions":51.47} -{"index":64,"Rank":56,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":58.42,"AskUbuntuDupQuestions":63.98,"MindSmallReranking":31.5,"SciDocsRR":83.8,"StackOverflowDupQuestions":54.41} -{"index":24,"Rank":57,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":58.36,"AskUbuntuDupQuestions":62.59,"MindSmallReranking":31.29,"SciDocsRR":85.94,"StackOverflowDupQuestions":53.64} -{"index":125,"Rank":58,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.3,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.92,"SciDocsRR":86.18,"StackOverflowDupQuestions":52.96} -{"index":119,"Rank":59,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.3,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.92,"SciDocsRR":86.18,"StackOverflowDupQuestions":52.96} -{"index":9,"Rank":60,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":58.24,"AskUbuntuDupQuestions":63.24,"MindSmallReranking":31.48,"SciDocsRR":84.68,"StackOverflowDupQuestions":53.56} -{"index":178,"Rank":61,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":58.14,"AskUbuntuDupQuestions":64.13,"MindSmallReranking":32.92,"SciDocsRR":83.68,"StackOverflowDupQuestions":51.84} -{"index":229,"Rank":62,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":58.04,"AskUbuntuDupQuestions":63.48,"MindSmallReranking":30.8,"SciDocsRR":87.12,"StackOverflowDupQuestions":50.76} -{"index":192,"Rank":63,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.04,"AskUbuntuDupQuestions":63.48,"MindSmallReranking":30.8,"SciDocsRR":87.12,"StackOverflowDupQuestions":50.76} -{"index":34,"Rank":64,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.01,"AskUbuntuDupQuestions":61.7,"MindSmallReranking":32.67,"SciDocsRR":85.2,"StackOverflowDupQuestions":52.48} -{"index":282,"Rank":65,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.99,"AskUbuntuDupQuestions":64.61,"MindSmallReranking":29.63,"SciDocsRR":84.25,"StackOverflowDupQuestions":53.46} -{"index":116,"Rank":66,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.95,"AskUbuntuDupQuestions":62.97,"MindSmallReranking":31.31,"SciDocsRR":87.14,"StackOverflowDupQuestions":50.36} -{"index":202,"Rank":67,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.86,"AskUbuntuDupQuestions":62.4,"MindSmallReranking":31.28,"SciDocsRR":85.01,"StackOverflowDupQuestions":52.75} -{"index":36,"Rank":68,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":32.59,"SciDocsRR":84.31,"StackOverflowDupQuestions":52.4} -{"index":0,"Rank":69,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":57.78,"AskUbuntuDupQuestions":63.84,"MindSmallReranking":31.89,"SciDocsRR":81.62,"StackOverflowDupQuestions":53.76} -{"index":254,"Rank":70,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.7,"AskUbuntuDupQuestions":61.72,"MindSmallReranking":32.21,"SciDocsRR":84.83,"StackOverflowDupQuestions":52.03} -{"index":18,"Rank":71,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.66,"AskUbuntuDupQuestions":61.83,"MindSmallReranking":31.34,"SciDocsRR":85.17,"StackOverflowDupQuestions":52.29} -{"index":148,"Rank":72,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":57.54,"AskUbuntuDupQuestions":64.3,"MindSmallReranking":31.68,"SciDocsRR":82.0,"StackOverflowDupQuestions":52.17} -{"index":112,"Rank":73,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.5,"AskUbuntuDupQuestions":63.29,"MindSmallReranking":31.3,"SciDocsRR":80.67,"StackOverflowDupQuestions":54.75} -{"index":60,"Rank":74,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":57.38,"AskUbuntuDupQuestions":63.13,"MindSmallReranking":31.34,"SciDocsRR":84.03,"StackOverflowDupQuestions":51.02} -{"index":149,"Rank":75,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, 
fp32)":4.62,"Average":57.29,"AskUbuntuDupQuestions":65.35,"MindSmallReranking":31.81,"SciDocsRR":79.49,"StackOverflowDupQuestions":52.52} -{"index":199,"Rank":76,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.18,"AskUbuntuDupQuestions":60.23,"MindSmallReranking":31.71,"SciDocsRR":84.46,"StackOverflowDupQuestions":52.32} -{"index":185,"Rank":77,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.12,"AskUbuntuDupQuestions":62.73,"MindSmallReranking":30.81,"SciDocsRR":85.11,"StackOverflowDupQuestions":49.85} -{"index":42,"Rank":78,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":57.09,"AskUbuntuDupQuestions":61.11,"MindSmallReranking":31.53,"SciDocsRR":84.78,"StackOverflowDupQuestions":50.95} -{"index":175,"Rank":79,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":56.98,"AskUbuntuDupQuestions":62.25,"MindSmallReranking":30.54,"SciDocsRR":83.1,"StackOverflowDupQuestions":52.05} -{"index":135,"Rank":80,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.98,"AskUbuntuDupQuestions":62.25,"MindSmallReranking":30.54,"SciDocsRR":83.1,"StackOverflowDupQuestions":52.05} -{"index":284,"Rank":81,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.72,"AskUbuntuDupQuestions":62.18,"MindSmallReranking":29.93,"SciDocsRR":83.25,"StackOverflowDupQuestions":51.53} -{"index":238,"Rank":82,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":56.66,"AskUbuntuDupQuestions":63.23,"MindSmallReranking":31.93,"SciDocsRR":77.96,"StackOverflowDupQuestions":53.5} -{"index":93,"Rank":83,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.61,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":31.83,"SciDocsRR":84.31,"StackOverflowDupQuestions":50.68} -{"index":155,"Rank":84,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":56.61,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":31.83,"SciDocsRR":84.31,"StackOverflowDupQuestions":50.68} -{"index":83,"Rank":85,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.56,"AskUbuntuDupQuestions":61.63,"MindSmallReranking":32.29,"SciDocsRR":80.79,"StackOverflowDupQuestions":51.53} -{"index":154,"Rank":86,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":56.53,"AskUbuntuDupQuestions":60.07,"MindSmallReranking":30.78,"SciDocsRR":83.94,"StackOverflowDupQuestions":51.33} -{"index":246,"Rank":87,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":56.42,"AskUbuntuDupQuestions":66.16,"MindSmallReranking":30.6,"SciDocsRR":76.09,"StackOverflowDupQuestions":52.85} -{"index":172,"Rank":88,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.42,"AskUbuntuDupQuestions":62.83,"MindSmallReranking":31.48,"SciDocsRR":80.97,"StackOverflowDupQuestions":50.38} -{"index":281,"Rank":89,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Average":56.32,"AskUbuntuDupQuestions":62.05,"MindSmallReranking":31.45,"SciDocsRR":81.22,"StackOverflowDupQuestions":50.54} -{"index":147,"Rank":90,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.2,"AskUbuntuDupQuestions":63.17,"MindSmallReranking":31.82,"SciDocsRR":78.83,"StackOverflowDupQuestions":50.99} -{"index":33,"Rank":91,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.09,"AskUbuntuDupQuestions":60.11,"MindSmallReranking":31.87,"SciDocsRR":81.62,"StackOverflowDupQuestions":50.76} -{"index":237,"Rank":92,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":55.96,"AskUbuntuDupQuestions":63.08,"MindSmallReranking":31.5,"SciDocsRR":76.49,"StackOverflowDupQuestions":52.79} -{"index":169,"Rank":93,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.91,"AskUbuntuDupQuestions":61.6,"MindSmallReranking":31.22,"SciDocsRR":80.27,"StackOverflowDupQuestions":50.55} -{"index":140,"Rank":94,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":55.91,"AskUbuntuDupQuestions":58.98,"MindSmallReranking":31.29,"SciDocsRR":83.02,"StackOverflowDupQuestions":50.34} -{"index":153,"Rank":95,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":55.91,"AskUbuntuDupQuestions":58.98,"MindSmallReranking":31.29,"SciDocsRR":83.02,"StackOverflowDupQuestions":50.34} -{"index":160,"Rank":96,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":55.86,"AskUbuntuDupQuestions":60.28,"MindSmallReranking":31.42,"SciDocsRR":82.04,"StackOverflowDupQuestions":49.72} -{"index":171,"Rank":97,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.84,"AskUbuntuDupQuestions":62.4,"MindSmallReranking":31.56,"SciDocsRR":79.31,"StackOverflowDupQuestions":50.11} -{"index":213,"Rank":98,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.78,"AskUbuntuDupQuestions":61.71,"MindSmallReranking":30.31,"SciDocsRR":80.61,"StackOverflowDupQuestions":50.47} -{"index":107,"Rank":99,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.77,"AskUbuntuDupQuestions":59.83,"MindSmallReranking":32.06,"SciDocsRR":81.54,"StackOverflowDupQuestions":49.65} -{"index":152,"Rank":100,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":55.7,"AskUbuntuDupQuestions":59.66,"MindSmallReranking":30.07,"SciDocsRR":82.9,"StackOverflowDupQuestions":50.15} -{"index":28,"Rank":101,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"index":26,"Rank":102,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"index":29,"Rank":103,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"index":27,"Rank":104,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"index":206,"Rank":105,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"index":129,"Rank":106,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"index":123,"Rank":107,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.65,"AskUbuntuDupQuestions":59.97,"MindSmallReranking":31.79,"SciDocsRR":79.77,"StackOverflowDupQuestions":51.07} -{"index":211,"Rank":108,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":55.65,"AskUbuntuDupQuestions":61.6,"MindSmallReranking":30.34,"SciDocsRR":80.33,"StackOverflowDupQuestions":50.32} -{"index":184,"Rank":109,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.6,"AskUbuntuDupQuestions":61.11,"MindSmallReranking":28.83,"SciDocsRR":85.93,"StackOverflowDupQuestions":46.52} -{"index":183,"Rank":110,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.5,"AskUbuntuDupQuestions":58.96,"MindSmallReranking":29.32,"SciDocsRR":88.03,"StackOverflowDupQuestions":45.71} -{"index":101,"Rank":111,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.47,"AskUbuntuDupQuestions":61.36,"MindSmallReranking":29.91,"SciDocsRR":79.23,"StackOverflowDupQuestions":51.38} -{"index":99,"Rank":112,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.46,"AskUbuntuDupQuestions":60.35,"MindSmallReranking":29.08,"SciDocsRR":81.56,"StackOverflowDupQuestions":50.87} -{"index":121,"Rank":113,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.46,"AskUbuntuDupQuestions":60.35,"MindSmallReranking":29.08,"SciDocsRR":81.56,"StackOverflowDupQuestions":50.87} -{"index":100,"Rank":114,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.42,"AskUbuntuDupQuestions":60.53,"MindSmallReranking":29.17,"SciDocsRR":80.81,"StackOverflowDupQuestions":51.19} -{"index":66,"Rank":115,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":55.38,"AskUbuntuDupQuestions":60.71,"MindSmallReranking":31.96,"SciDocsRR":79.23,"StackOverflowDupQuestions":49.61} -{"index":236,"Rank":116,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":55.36,"AskUbuntuDupQuestions":61.64,"MindSmallReranking":31.84,"SciDocsRR":76.39,"StackOverflowDupQuestions":51.58} -{"index":69,"Rank":117,"Model":"Ivysaur<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.34,"AskUbuntuDupQuestions":59.7,"MindSmallReranking":31.78,"SciDocsRR":80.04,"StackOverflowDupQuestions":49.83} -{"index":176,"Rank":118,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.32,"AskUbuntuDupQuestions":61.19,"MindSmallReranking":30.61,"SciDocsRR":79.3,"StackOverflowDupQuestions":50.18} -{"index":103,"Rank":119,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.31,"AskUbuntuDupQuestions":61.15,"MindSmallReranking":30.6,"SciDocsRR":79.34,"StackOverflowDupQuestions":50.16} -{"index":128,"Rank":120,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.24,"AskUbuntuDupQuestions":60.35,"MindSmallReranking":30.98,"SciDocsRR":81.45,"StackOverflowDupQuestions":48.17} -{"index":210,"Rank":121,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":55.18,"AskUbuntuDupQuestions":61.34,"MindSmallReranking":30.04,"SciDocsRR":79.4,"StackOverflowDupQuestions":49.95} -{"index":177,"Rank":122,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.09,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":30.99,"SciDocsRR":79.76,"StackOverflowDupQuestions":49.99} -{"index":136,"Rank":123,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.09,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":30.99,"SciDocsRR":79.76,"StackOverflowDupQuestions":49.99} -{"index":35,"Rank":124,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.06,"AskUbuntuDupQuestions":58.54,"MindSmallReranking":31.36,"SciDocsRR":79.9,"StackOverflowDupQuestions":50.45} -{"index":208,"Rank":125,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.99,"AskUbuntuDupQuestions":59.57,"MindSmallReranking":30.6,"SciDocsRR":79.91,"StackOverflowDupQuestions":49.87} -{"index":167,"Rank":126,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.9,"AskUbuntuDupQuestions":59.92,"MindSmallReranking":30.97,"SciDocsRR":78.62,"StackOverflowDupQuestions":50.07} -{"index":159,"Rank":127,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.84,"AskUbuntuDupQuestions":58.23,"MindSmallReranking":30.97,"SciDocsRR":80.74,"StackOverflowDupQuestions":49.41} -{"index":245,"Rank":128,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":54.71,"AskUbuntuDupQuestions":62.86,"MindSmallReranking":29.77,"SciDocsRR":75.16,"StackOverflowDupQuestions":51.05} -{"index":82,"Rank":129,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.67,"AskUbuntuDupQuestions":59.63,"MindSmallReranking":31.72,"SciDocsRR":77.72,"StackOverflowDupQuestions":49.61} -{"index":174,"Rank":130,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.64,"AskUbuntuDupQuestions":60.56,"MindSmallReranking":30.4,"SciDocsRR":78.09,"StackOverflowDupQuestions":49.5} 
-{"index":132,"Rank":131,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.63,"AskUbuntuDupQuestions":61.05,"MindSmallReranking":30.55,"SciDocsRR":79.83,"StackOverflowDupQuestions":47.1} -{"index":134,"Rank":132,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":54.58,"AskUbuntuDupQuestions":63.09,"MindSmallReranking":30.89,"SciDocsRR":74.28,"StackOverflowDupQuestions":50.06} -{"index":209,"Rank":133,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":54.56,"AskUbuntuDupQuestions":61.16,"MindSmallReranking":30.02,"SciDocsRR":78.05,"StackOverflowDupQuestions":49.0} -{"index":267,"Rank":134,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.46,"AskUbuntuDupQuestions":57.69,"MindSmallReranking":30.32,"SciDocsRR":79.91,"StackOverflowDupQuestions":49.93} -{"index":157,"Rank":135,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":54.45,"AskUbuntuDupQuestions":59.36,"MindSmallReranking":29.56,"SciDocsRR":79.76,"StackOverflowDupQuestions":49.13} -{"index":104,"Rank":136,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.4,"AskUbuntuDupQuestions":60.25,"MindSmallReranking":30.26,"SciDocsRR":76.94,"StackOverflowDupQuestions":50.16} -{"index":158,"Rank":137,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"AskUbuntuDupQuestions":58.11,"MindSmallReranking":30.1,"SciDocsRR":79.16,"StackOverflowDupQuestions":49.93} -{"index":106,"Rank":138,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.29,"AskUbuntuDupQuestions":58.08,"MindSmallReranking":31.07,"SciDocsRR":78.57,"StackOverflowDupQuestions":49.43} -{"index":262,"Rank":139,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.29,"AskUbuntuDupQuestions":57.59,"MindSmallReranking":31.29,"SciDocsRR":82.14,"StackOverflowDupQuestions":46.13} -{"index":166,"Rank":140,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.28,"AskUbuntuDupQuestions":59.5,"MindSmallReranking":30.51,"SciDocsRR":78.11,"StackOverflowDupQuestions":49.0} -{"index":105,"Rank":141,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.28,"AskUbuntuDupQuestions":58.09,"MindSmallReranking":31.29,"SciDocsRR":78.78,"StackOverflowDupQuestions":48.96} -{"index":235,"Rank":142,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":54.23,"AskUbuntuDupQuestions":60.86,"MindSmallReranking":31.33,"SciDocsRR":73.71,"StackOverflowDupQuestions":51.01} -{"index":203,"Rank":143,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.14,"AskUbuntuDupQuestions":56.97,"MindSmallReranking":31.38,"SciDocsRR":79.46,"StackOverflowDupQuestions":48.75} -{"index":244,"Rank":144,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":54.0,"AskUbuntuDupQuestions":61.51,"MindSmallReranking":30.27,"SciDocsRR":74.88,"StackOverflowDupQuestions":49.34} 
-{"index":79,"Rank":145,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.0,"AskUbuntuDupQuestions":58.13,"MindSmallReranking":31.34,"SciDocsRR":77.21,"StackOverflowDupQuestions":49.32} -{"index":65,"Rank":146,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":53.98,"AskUbuntuDupQuestions":58.6,"MindSmallReranking":29.73,"SciDocsRR":77.81,"StackOverflowDupQuestions":49.8} -{"index":44,"Rank":147,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.95,"AskUbuntuDupQuestions":58.79,"MindSmallReranking":32.02,"SciDocsRR":78.54,"StackOverflowDupQuestions":46.44} -{"index":162,"Rank":148,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.87,"AskUbuntuDupQuestions":57.88,"MindSmallReranking":30.28,"SciDocsRR":78.13,"StackOverflowDupQuestions":49.2} -{"index":242,"Rank":149,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":53.8,"AskUbuntuDupQuestions":60.16,"MindSmallReranking":30.15,"SciDocsRR":78.09,"StackOverflowDupQuestions":46.79} -{"index":113,"Rank":150,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.64,"AskUbuntuDupQuestions":58.54,"MindSmallReranking":30.98,"SciDocsRR":77.2,"StackOverflowDupQuestions":47.85} -{"index":241,"Rank":151,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.62,"AskUbuntuDupQuestions":60.49,"MindSmallReranking":30.37,"SciDocsRR":77.78,"StackOverflowDupQuestions":45.85} -{"index":68,"Rank":152,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.49,"AskUbuntuDupQuestions":57.49,"MindSmallReranking":31.78,"SciDocsRR":77.89,"StackOverflowDupQuestions":46.78} -{"index":212,"Rank":153,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":53.42,"AskUbuntuDupQuestions":60.79,"MindSmallReranking":29.7,"SciDocsRR":75.79,"StackOverflowDupQuestions":47.42} -{"index":207,"Rank":154,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.32,"AskUbuntuDupQuestions":57.59,"MindSmallReranking":31.29,"SciDocsRR":75.51,"StackOverflowDupQuestions":48.89} -{"index":285,"Rank":155,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.27,"AskUbuntuDupQuestions":58.31,"MindSmallReranking":30.75,"SciDocsRR":75.62,"StackOverflowDupQuestions":48.4} -{"index":214,"Rank":156,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.14,"AskUbuntuDupQuestions":56.69,"MindSmallReranking":31.58,"SciDocsRR":76.51,"StackOverflowDupQuestions":47.78} -{"index":63,"Rank":157,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":53.09,"AskUbuntuDupQuestions":57.16,"MindSmallReranking":30.1,"SciDocsRR":76.28,"StackOverflowDupQuestions":48.82} -{"index":243,"Rank":158,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":53.09,"AskUbuntuDupQuestions":59.73,"MindSmallReranking":30.2,"SciDocsRR":73.96,"StackOverflowDupQuestions":48.46} -{"index":173,"Rank":159,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.07,"AskUbuntuDupQuestions":60.25,"MindSmallReranking":30.68,"SciDocsRR":74.16,"StackOverflowDupQuestions":47.18} -{"index":61,"Rank":160,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":52.95,"AskUbuntuDupQuestions":55.56,"MindSmallReranking":30.86,"SciDocsRR":77.62,"StackOverflowDupQuestions":47.77} -{"index":168,"Rank":161,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.6,"AskUbuntuDupQuestions":57.38,"MindSmallReranking":30.52,"SciDocsRR":75.13,"StackOverflowDupQuestions":47.38} -{"index":84,"Rank":162,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.33,"AskUbuntuDupQuestions":55.9,"MindSmallReranking":31.11,"SciDocsRR":77.54,"StackOverflowDupQuestions":44.77} -{"index":239,"Rank":163,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":51.84,"AskUbuntuDupQuestions":58.99,"MindSmallReranking":27.13,"SciDocsRR":72.78,"StackOverflowDupQuestions":48.48} -{"index":77,"Rank":164,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.68,"AskUbuntuDupQuestions":56.48,"MindSmallReranking":29.57,"SciDocsRR":74.66,"StackOverflowDupQuestions":45.99} -{"index":80,"Rank":165,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.58,"AskUbuntuDupQuestions":55.84,"MindSmallReranking":30.4,"SciDocsRR":71.34,"StackOverflowDupQuestions":44.74} -{"index":78,"Rank":166,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.55,"AskUbuntuDupQuestions":55.77,"MindSmallReranking":29.04,"SciDocsRR":72.91,"StackOverflowDupQuestions":44.49} -{"index":72,"Rank":167,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.29,"AskUbuntuDupQuestions":54.09,"MindSmallReranking":30.28,"SciDocsRR":74.48,"StackOverflowDupQuestions":42.3} -{"index":71,"Rank":168,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.41,"AskUbuntuDupQuestions":53.77,"MindSmallReranking":29.92,"SciDocsRR":70.82,"StackOverflowDupQuestions":43.14} -{"index":277,"Rank":169,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.02,"AskUbuntuDupQuestions":53.49,"MindSmallReranking":30.71,"SciDocsRR":71.04,"StackOverflowDupQuestions":40.85} -{"index":234,"Rank":170,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":48.82,"AskUbuntuDupQuestions":53.75,"MindSmallReranking":30.39,"SciDocsRR":69.22,"StackOverflowDupQuestions":41.92} -{"index":70,"Rank":171,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.51,"AskUbuntuDupQuestions":52.34,"MindSmallReranking":30.09,"SciDocsRR":71.04,"StackOverflowDupQuestions":40.57} -{"index":227,"Rank":172,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Average":48.42,"AskUbuntuDupQuestions":52.75,"MindSmallReranking":29.81,"SciDocsRR":68.72,"StackOverflowDupQuestions":42.42} -{"index":231,"Rank":173,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.1,"AskUbuntuDupQuestions":50.07,"MindSmallReranking":24.8,"SciDocsRR":81.31,"StackOverflowDupQuestions":36.22} -{"index":127,"Rank":174,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.85,"AskUbuntuDupQuestions":53.44,"MindSmallReranking":28.54,"SciDocsRR":68.65,"StackOverflowDupQuestions":40.76} -{"index":67,"Rank":175,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":47.7,"AskUbuntuDupQuestions":52.7,"MindSmallReranking":29.52,"SciDocsRR":67.76,"StackOverflowDupQuestions":40.82} -{"index":81,"Rank":176,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.56,"AskUbuntuDupQuestions":52.63,"MindSmallReranking":29.27,"SciDocsRR":68.36,"StackOverflowDupQuestions":39.97} -{"index":217,"Rank":177,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":47.54,"AskUbuntuDupQuestions":51.8,"MindSmallReranking":29.3,"SciDocsRR":70.14,"StackOverflowDupQuestions":38.9} -{"index":256,"Rank":178,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.31,"AskUbuntuDupQuestions":50.09,"MindSmallReranking":29.01,"SciDocsRR":70.94,"StackOverflowDupQuestions":39.18} -{"index":218,"Rank":179,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":46.47,"AskUbuntuDupQuestions":51.57,"MindSmallReranking":28.62,"SciDocsRR":66.33,"StackOverflowDupQuestions":39.35} -{"index":141,"Rank":180,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.18,"AskUbuntuDupQuestions":50.89,"MindSmallReranking":26.88,"SciDocsRR":68.36,"StackOverflowDupQuestions":38.61} -{"index":233,"Rank":181,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":44.75,"AskUbuntuDupQuestions":50.88,"MindSmallReranking":28.92,"SciDocsRR":63.55,"StackOverflowDupQuestions":35.65} -{"index":122,"Rank":182,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.44,"AskUbuntuDupQuestions":45.84,"MindSmallReranking":28.37,"SciDocsRR":64.94,"StackOverflowDupQuestions":34.62} -{"index":232,"Rank":183,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":43.29,"AskUbuntuDupQuestions":49.57,"MindSmallReranking":27.01,"SciDocsRR":62.56,"StackOverflowDupQuestions":34.03} -{"index":11,"Rank":184,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":41.44,"AskUbuntuDupQuestions":48.99,"MindSmallReranking":24.79,"SciDocsRR":54.99,"StackOverflowDupQuestions":36.98} -{"index":255,"Rank":185,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.44,"AskUbuntuDupQuestions":44.88,"MindSmallReranking":25.65,"SciDocsRR":43.9,"StackOverflowDupQuestions":23.35} -{"index":263,"Rank":186,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":0.53,"AskUbuntuDupQuestions":0.56,"MindSmallReranking":0.31,"SciDocsRR":0.79,"StackOverflowDupQuestions":0.47} -{"index":2,"Rank":187,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":"","MindSmallReranking":"","SciDocsRR":88.87,"StackOverflowDupQuestions":""} -{"index":30,"Rank":197,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":53.99,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"index":73,"Rank":217,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":55.31,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"index":76,"Rank":220,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":58.01,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"index":97,"Rank":230,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":48.66,"MindSmallReranking":"","SciDocsRR":66.69,"StackOverflowDupQuestions":39.5} -{"index":98,"Rank":231,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":59.82,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"index":124,"Rank":235,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","AskUbuntuDupQuestions":61.53,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"index":248,"Rank":264,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":57.91,"MindSmallReranking":"","SciDocsRR":70.21,"StackOverflowDupQuestions":44.9} -{"index":251,"Rank":267,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":51.34,"MindSmallReranking":"","SciDocsRR":70.59,"StackOverflowDupQuestions":40.99} -{"index":270,"Rank":277,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":60.76,"MindSmallReranking":"","SciDocsRR":84.34,"StackOverflowDupQuestions":51.68} -{"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":54.68,"MindSmallReranking":"","SciDocsRR":72.78,"StackOverflowDupQuestions":40.65} -{"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":55.09,"MindSmallReranking":"","SciDocsRR":70.93,"StackOverflowDupQuestions":42.42} -{"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":53.56,"MindSmallReranking":"","SciDocsRR":68.7,"StackOverflowDupQuestions":39.41} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.79,"AlloprofReranking":57.37,"AlloprofReranking (fra-Latn)":69.44,"SyntecReranking":86.9,"SyntecReranking (fra-Latn)":85.45} 
+{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":73.68,"AlloprofReranking":58.1,"AlloprofReranking (fra-Latn)":65.9,"SyntecReranking":85.43,"SyntecReranking (fra-Latn)":85.31} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":72.12,"AlloprofReranking":56.17,"AlloprofReranking (fra-Latn)":64.41,"SyntecReranking":86.7,"SyntecReranking (fra-Latn)":81.22} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":71.44,"AlloprofReranking":54.34,"AlloprofReranking (fra-Latn)":67.2,"SyntecReranking":83.23,"SyntecReranking (fra-Latn)":80.97} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":64.74,"AlloprofReranking":49.01,"AlloprofReranking (fra-Latn)":62.42,"SyntecReranking":75.03,"SyntecReranking (fra-Latn)":72.5} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":61.44,"AlloprofReranking":49.51,"AlloprofReranking (fra-Latn)":55.37,"SyntecReranking":73.28,"SyntecReranking (fra-Latn)":67.62} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":55.3,"AlloprofReranking":31.69,"AlloprofReranking (fra-Latn)":62.62,"SyntecReranking":59.57,"SyntecReranking (fra-Latn)":67.31} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":72.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.57,"SyntecReranking (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":63.54,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":82.65,"SyntecReranking (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":70.79,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":86.77,"SyntecReranking (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":72.92,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":91.2,"SyntecReranking (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":74.78,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":90.4,"SyntecReranking (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AlloprofReranking":35.29,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":55.93,"SyntecReranking (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.6,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.03,"SyntecReranking (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.01,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":85.72,"SyntecReranking (fra-Latn)":null} 
+{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.21,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.21,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.25,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":77.95,"SyntecReranking":null,"SyntecReranking (fra-Latn)":83.32} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AlloprofReranking":53.0,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":82.9,"SyntecReranking (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":48.68,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":79.75,"SyntecReranking (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AlloprofReranking":57.62,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.15,"SyntecReranking (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":34.81,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":55.88,"SyntecReranking (fra-Latn)":null} 
+{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":34.55,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":57.18,"SyntecReranking (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AlloprofReranking":26.29,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":42.8,"SyntecReranking (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":36.23,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":38.85,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":66.4,"SyntecReranking (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AlloprofReranking":47.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":77.05,"SyntecReranking (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":39.13,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":62.58,"SyntecReranking (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":28.75,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":50.88,"SyntecReranking (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":67.01,"SyntecReranking":null,"SyntecReranking (fra-Latn)":69.17} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":69.63,"SyntecReranking":null,"SyntecReranking (fra-Latn)":66.12} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofReranking":51.77,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":74.78,"SyntecReranking (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} 
+{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AlloprofReranking":40.28,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":65.08,"SyntecReranking (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":50.12,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":78.05,"SyntecReranking (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":57.99,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":79.77,"SyntecReranking (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofReranking":63.3,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":83.07,"SyntecReranking (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AlloprofReranking":68.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":85.15,"SyntecReranking (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.48,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":70.28,"SyntecReranking (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":56.23,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":73.85,"SyntecReranking (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":55.39,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":77.13,"SyntecReranking (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AlloprofReranking":25.58,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":43.75,"SyntecReranking (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AlloprofReranking":28.62,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":49.4,"SyntecReranking (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":89.87,"SyntecReranking (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} diff 
--git a/all_data_tasks/30/default.jsonl b/all_data_tasks/30/default.jsonl index 03ddccc8e453898a7ed89693ccd103d10018beb1..f5cd8b5a3e42faa2c53f10489b18506947dad554 100644 --- a/all_data_tasks/30/default.jsonl +++ b/all_data_tasks/30/default.jsonl @@ -1,31 +1,23 @@ -{"index":3,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.62,"CDSC-E":78.23,"PPC":95.43,"PSC":99.24,"SICK-E-PL":85.58} -{"index":39,"Rank":2,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.13,"CDSC-E":79.87,"PPC":93.56,"PSC":98.63,"SICK-E-PL":84.47} -{"index":2,"Rank":3,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":88.48,"CDSC-E":76.75,"PPC":94.1,"PSC":99.37,"SICK-E-PL":83.68} -{"index":34,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.48,"CDSC-E":76.75,"PPC":94.1,"PSC":99.37,"SICK-E-PL":83.68} -{"index":22,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.48,"CDSC-E":76.75,"PPC":94.1,"PSC":99.37,"SICK-E-PL":83.68} -{"index":38,"Rank":6,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.14,"CDSC-E":79.1,"PPC":92.81,"PSC":98.61,"SICK-E-PL":82.04} -{"index":19,"Rank":7,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.92,"CDSC-E":79.12,"PPC":92.65,"PSC":98.42,"SICK-E-PL":81.47} -{"index":41,"Rank":8,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":87.04,"CDSC-E":75.06,"PPC":93.49,"PSC":99.05,"SICK-E-PL":80.56} -{"index":40,"Rank":9,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":87.0,"CDSC-E":75.99,"PPC":93.29,"PSC":99.1,"SICK-E-PL":79.63} -{"index":52,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":86.23,"CDSC-E":75.76,"PPC":93.67,"PSC":98.26,"SICK-E-PL":77.22} -{"index":35,"Rank":11,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.16,"CDSC-E":77.27,"PPC":91.76,"PSC":99.15,"SICK-E-PL":76.45} -{"index":36,"Rank":12,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.9,"CDSC-E":76.83,"PPC":91.09,"PSC":99.53,"SICK-E-PL":76.14} -{"index":26,"Rank":13,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":85.5,"CDSC-E":74.47,"PPC":92.18,"PSC":99.39,"SICK-E-PL":75.96} -{"index":1,"Rank":14,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.87,"CDSC-E":72.42,"PPC":91.47,"PSC":99.28,"SICK-E-PL":76.32} -{"index":33,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.87,"CDSC-E":72.42,"PPC":91.47,"PSC":99.28,"SICK-E-PL":76.32} -{"index":37,"Rank":16,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.61,"CDSC-E":76.68,"PPC":89.7,"PSC":98.86,"SICK-E-PL":73.21} 
-{"index":51,"Rank":17,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":83.28,"CDSC-E":72.22,"PPC":91.8,"PSC":97.14,"SICK-E-PL":71.94} -{"index":25,"Rank":18,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":82.15,"CDSC-E":72.67,"PPC":88.01,"PSC":99.14,"SICK-E-PL":68.77} -{"index":28,"Rank":19,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":80.5,"CDSC-E":69.7,"PPC":86.72,"PSC":99.24,"SICK-E-PL":66.34} -{"index":42,"Rank":20,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":79.27,"CDSC-E":68.91,"PPC":86.97,"PSC":97.42,"SICK-E-PL":63.77} -{"index":48,"Rank":21,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":79.26,"CDSC-E":71.83,"PPC":86.83,"PSC":96.35,"SICK-E-PL":62.05} -{"index":30,"Rank":22,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":77.33,"CDSC-E":67.35,"PPC":85.33,"PSC":98.46,"SICK-E-PL":58.19} -{"index":29,"Rank":23,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":75.32,"CDSC-E":63.31,"PPC":84.18,"PSC":98.87,"SICK-E-PL":54.93} -{"index":23,"Rank":24,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.2,"CDSC-E":59.97,"PPC":85.37,"PSC":91.98,"SICK-E-PL":55.48} -{"index":4,"Rank":25,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.51,"CDSC-E":43.51,"PPC":70.19,"PSC":77.83,"SICK-E-PL":46.51} -{"index":8,"Rank":30,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.43,"PPC":"","PSC":0.4,"SICK-E-PL":0.55} -{"index":9,"Rank":31,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.37,"PPC":"","PSC":0.38,"SICK-E-PL":0.42} -{"index":10,"Rank":32,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.46,"PPC":"","PSC":0.41,"SICK-E-PL":0.51} -{"index":11,"Rank":33,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.44,"PPC":"","PSC":0.75,"SICK-E-PL":0.52} -{"index":12,"Rank":34,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.41,"PPC":"","PSC":0.38,"SICK-E-PL":0.44} -{"index":13,"Rank":35,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.56,"PPC":"","PSC":0.85,"SICK-E-PL":0.61} +{"Rank":1,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":85.17,"CDSC-E":75.76,"CDSC-E (pol-Latn)":75.77,"PPC":93.67,"PSC":98.26,"PSC (pol-Latn)":98.26,"SICK-E-PL":77.22,"SICK-E-PL (pol-Latn)":77.22} +{"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":84.55,"CDSC-E":74.47,"CDSC-E (pol-Latn)":74.47,"PPC":92.18,"PSC":99.39,"PSC (pol-Latn)":99.4,"SICK-E-PL":75.96,"SICK-E-PL (pol-Latn)":75.95} 
+{"Rank":3,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":82.06,"CDSC-E":72.22,"CDSC-E (pol-Latn)":72.22,"PPC":91.8,"PSC":97.14,"PSC (pol-Latn)":97.14,"SICK-E-PL":71.94,"SICK-E-PL (pol-Latn)":71.94} +{"Rank":4,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":81.31,"CDSC-E":72.67,"CDSC-E (pol-Latn)":72.7,"PPC":88.01,"PSC":99.14,"PSC (pol-Latn)":99.14,"SICK-E-PL":68.77,"SICK-E-PL (pol-Latn)":68.76} +{"Rank":5,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":79.61,"CDSC-E":69.7,"CDSC-E (pol-Latn)":69.69,"PPC":86.72,"PSC":99.24,"PSC (pol-Latn)":99.23,"SICK-E-PL":66.34,"SICK-E-PL (pol-Latn)":66.35} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":78.17,"CDSC-E":68.91,"CDSC-E (pol-Latn)":68.92,"PPC":86.97,"PSC":97.42,"PSC (pol-Latn)":97.42,"SICK-E-PL":63.77,"SICK-E-PL (pol-Latn)":63.77} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":72.65,"PPC":null,"PSC":null,"PSC (pol-Latn)":99.43,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":75.98} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":63.31,"CDSC-E (pol-Latn)":null,"PPC":84.18,"PSC":98.87,"PSC (pol-Latn)":null,"SICK-E-PL":54.93,"SICK-E-PL (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":67.35,"CDSC-E (pol-Latn)":null,"PPC":85.33,"PSC":98.46,"PSC (pol-Latn)":null,"SICK-E-PL":58.19,"SICK-E-PL (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":75.99,"CDSC-E (pol-Latn)":null,"PPC":93.29,"PSC":99.1,"PSC (pol-Latn)":null,"SICK-E-PL":79.63,"SICK-E-PL (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":75.06,"CDSC-E (pol-Latn)":null,"PPC":93.49,"PSC":99.05,"PSC (pol-Latn)":null,"SICK-E-PL":80.56,"SICK-E-PL (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":49.04,"PPC":null,"PSC":null,"PSC (pol-Latn)":87.92,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":49.63} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":47.27,"PPC":null,"PSC":null,"PSC (pol-Latn)":81.87,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":47.32} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":45.37,"PPC":null,"PSC":null,"PSC (pol-Latn)":83.28,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":46.51} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-E":71.83,"CDSC-E (pol-Latn)":null,"PPC":86.83,"PSC":96.35,"PSC (pol-Latn)":null,"SICK-E-PL":62.05,"SICK-E-PL (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} diff --git a/all_data_tasks/31/default.jsonl b/all_data_tasks/31/default.jsonl index 84f9b376201d2a9f173b78ae3e689f0a0a127f9b..643df4391399f084fd0bb15780c91dff0b65fc75 100644 --- a/all_data_tasks/31/default.jsonl +++ b/all_data_tasks/31/default.jsonl @@ -1,24 +1,23 @@ -{"index":3,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.41,"ArguAna-PL":59.71,"DBPedia-PL":43.19,"FiQA-PL":46.12,"HotpotQA-PL":77.03,"MSMARCO-PL":72.69,"NFCorpus-PL":36.72,"NQ-PL":56.85,"Quora-PL":84.47,"SCIDOCS-PL":19.53,"SciFact-PL":74.43,"TRECCOVID-PL":82.75} -{"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.69,"ArguAna-PL":59.87,"DBPedia-PL":41.3,"FiQA-PL":41.05,"HotpotQA-PL":67.61,"MSMARCO-PL":62.22,"NFCorpus-PL":30.89,"NQ-PL":48.45,"Quora-PL":83.21,"SCIDOCS-PL":16.08,"SciFact-PL":70.58,"TRECCOVID-PL":80.3} -{"index":2,"Rank":3,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":54.69,"ArguAna-PL":59.87,"DBPedia-PL":41.3,"FiQA-PL":41.05,"HotpotQA-PL":67.61,"MSMARCO-PL":62.22,"NFCorpus-PL":30.89,"NQ-PL":48.45,"Quora-PL":83.21,"SCIDOCS-PL":16.08,"SciFact-PL":70.58,"TRECCOVID-PL":80.3} -{"index":34,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":54.69,"ArguAna-PL":59.87,"DBPedia-PL":41.3,"FiQA-PL":41.05,"HotpotQA-PL":67.61,"MSMARCO-PL":62.22,"NFCorpus-PL":30.89,"NQ-PL":48.45,"Quora-PL":83.21,"SCIDOCS-PL":16.08,"SciFact-PL":70.58,"TRECCOVID-PL":80.3} -{"index":39,"Rank":5,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.71,"ArguAna-PL":63.4,"DBPedia-PL":40.27,"FiQA-PL":40.89,"HotpotQA-PL":71.04,"MSMARCO-PL":36.63,"NFCorpus-PL":33.94,"NQ-PL":47.62,"Quora-PL":85.51,"SCIDOCS-PL":19.47,"SciFact-PL":70.23,"TRECCOVID-PL":70.81} -{"index":36,"Rank":6,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.63,"ArguAna-PL":63.25,"DBPedia-PL":39.84,"FiQA-PL":39.9,"HotpotQA-PL":70.94,"MSMARCO-PL":36.47,"NFCorpus-PL":34.03,"NQ-PL":47.33,"Quora-PL":85.63,"SCIDOCS-PL":19.13,"SciFact-PL":71.21,"TRECCOVID-PL":71.18} -{"index":33,"Rank":7,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.88,"ArguAna-PL":56.48,"DBPedia-PL":36.98,"FiQA-PL":31.61,"HotpotQA-PL":60.9,"MSMARCO-PL":63.98,"NFCorpus-PL":30.1,"NQ-PL":44.2,"Quora-PL":81.43,"SCIDOCS-PL":16.59,"SciFact-PL":67.17,"TRECCOVID-PL":81.19} -{"index":1,"Rank":8,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.88,"ArguAna-PL":56.48,"DBPedia-PL":36.98,"FiQA-PL":31.61,"HotpotQA-PL":60.9,"MSMARCO-PL":63.98,"NFCorpus-PL":30.1,"NQ-PL":44.2,"Quora-PL":81.43,"SCIDOCS-PL":16.59,"SciFact-PL":67.17,"TRECCOVID-PL":81.19} -{"index":35,"Rank":9,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.06,"ArguAna-PL":58.4,"DBPedia-PL":37.19,"FiQA-PL":34.53,"HotpotQA-PL":66.25,"MSMARCO-PL":32.54,"NFCorpus-PL":33.71,"NQ-PL":44.6,"Quora-PL":84.44,"SCIDOCS-PL":17.35,"SciFact-PL":68.29,"TRECCOVID-PL":73.33} -{"index":38,"Rank":10,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.92,"ArguAna-PL":59.02,"DBPedia-PL":36.22,"FiQA-PL":35.01,"HotpotQA-PL":66.64,"MSMARCO-PL":33.05,"NFCorpus-PL":34.14,"NQ-PL":45.65,"Quora-PL":84.44,"SCIDOCS-PL":17.84,"SciFact-PL":65.75,"TRECCOVID-PL":71.33} -{"index":26,"Rank":11,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.98,"ArguAna-PL":53.02,"DBPedia-PL":35.82,"FiQA-PL":33.0,"HotpotQA-PL":67.41,"MSMARCO-PL":33.38,"NFCorpus-PL":30.24,"NQ-PL":52.79,"Quora-PL":83.65,"SCIDOCS-PL":13.81,"SciFact-PL":65.66,"TRECCOVID-PL":70.03} -{"index":25,"Rank":12,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.01,"ArguAna-PL":42.81,"DBPedia-PL":30.23,"FiQA-PL":25.52,"HotpotQA-PL":63.52,"MSMARCO-PL":29.52,"NFCorpus-PL":25.98,"NQ-PL":44.8,"Quora-PL":81.22,"SCIDOCS-PL":12.35,"SciFact-PL":62.11,"TRECCOVID-PL":66.06} -{"index":37,"Rank":13,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.83,"ArguAna-PL":54.31,"DBPedia-PL":30.28,"FiQA-PL":29.75,"HotpotQA-PL":57.14,"MSMARCO-PL":25.94,"NFCorpus-PL":27.6,"NQ-PL":33.83,"Quora-PL":81.15,"SCIDOCS-PL":14.79,"SciFact-PL":58.14,"TRECCOVID-PL":58.2} -{"index":28,"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":42.43,"ArguAna-PL":37.43,"DBPedia-PL":29.27,"FiQA-PL":22.03,"HotpotQA-PL":60.15,"MSMARCO-PL":26.94,"NFCorpus-PL":26.48,"NQ-PL":40.46,"Quora-PL":78.7,"SCIDOCS-PL":11.6,"SciFact-PL":62.76,"TRECCOVID-PL":70.92} -{"index":19,"Rank":15,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.19,"ArguAna-PL":56.06,"DBPedia-PL":27.0,"FiQA-PL":24.73,"HotpotQA-PL":50.61,"MSMARCO-PL":43.25,"NFCorpus-PL":31.15,"NQ-PL":28.89,"Quora-PL":83.59,"SCIDOCS-PL":12.21,"SciFact-PL":57.73,"TRECCOVID-PL":48.83} -{"index":29,"Rank":16,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":39.16,"ArguAna-PL":41.97,"DBPedia-PL":24.07,"FiQA-PL":24.25,"HotpotQA-PL":43.41,"MSMARCO-PL":51.56,"NFCorpus-PL":25.95,"NQ-PL":35.09,"Quora-PL":78.86,"SCIDOCS-PL":11.0,"SciFact-PL":51.92,"TRECCOVID-PL":42.64} -{"index":30,"Rank":17,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":38.43,"ArguAna-PL":44.12,"DBPedia-PL":26.32,"FiQA-PL":24.95,"HotpotQA-PL":45.13,"MSMARCO-PL":25.47,"NFCorpus-PL":28.55,"NQ-PL":37.9,"Quora-PL":77.98,"SCIDOCS-PL":10.9,"SciFact-PL":54.44,"TRECCOVID-PL":46.98} -{"index":41,"Rank":18,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":34.44,"ArguAna-PL":51.87,"DBPedia-PL":24.59,"FiQA-PL":22.27,"HotpotQA-PL":32.11,"MSMARCO-PL":17.91,"NFCorpus-PL":24.05,"NQ-PL":23.54,"Quora-PL":81.49,"SCIDOCS-PL":13.23,"SciFact-PL":52.51,"TRECCOVID-PL":35.23} -{"index":40,"Rank":19,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":32.08,"ArguAna-PL":49.42,"DBPedia-PL":19.82,"FiQA-PL":19.58,"HotpotQA-PL":23.47,"MSMARCO-PL":16.51,"NFCorpus-PL":22.49,"NQ-PL":19.83,"Quora-PL":81.17,"SCIDOCS-PL":12.15,"SciFact-PL":49.49,"TRECCOVID-PL":38.97} -{"index":52,"Rank":20,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":29.16,"ArguAna-PL":42.62,"DBPedia-PL":20.18,"FiQA-PL":14.68,"HotpotQA-PL":29.36,"MSMARCO-PL":12.45,"NFCorpus-PL":18.53,"NQ-PL":15.64,"Quora-PL":79.18,"SCIDOCS-PL":11.18,"SciFact-PL":41.53,"TRECCOVID-PL":35.38} -{"index":51,"Rank":21,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.66,"ArguAna-PL":37.83,"DBPedia-PL":18.0,"FiQA-PL":12.49,"HotpotQA-PL":22.76,"MSMARCO-PL":10.39,"NFCorpus-PL":17.16,"NQ-PL":12.56,"Quora-PL":77.18,"SCIDOCS-PL":10.26,"SciFact-PL":40.24,"TRECCOVID-PL":34.38} -{"index":42,"Rank":22,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":23.36,"ArguAna-PL":38.52,"DBPedia-PL":16.1,"FiQA-PL":7.63,"HotpotQA-PL":19.72,"MSMARCO-PL":7.22,"NFCorpus-PL":17.45,"NQ-PL":9.65,"Quora-PL":74.96,"SCIDOCS-PL":7.48,"SciFact-PL":39.79,"TRECCOVID-PL":18.45} -{"index":48,"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":21.18,"ArguAna-PL":36.7,"DBPedia-PL":12.36,"FiQA-PL":8.02,"HotpotQA-PL":20.83,"MSMARCO-PL":4.57,"NFCorpus-PL":16.28,"NQ-PL":5.85,"Quora-PL":71.95,"SCIDOCS-PL":6.5,"SciFact-PL":33.03,"TRECCOVID-PL":16.91} -{"index":23,"Rank":42,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","ArguAna-PL":15.04,"DBPedia-PL":"","FiQA-PL":3.1,"HotpotQA-PL":"","MSMARCO-PL":"","NFCorpus-PL":4.6,"NQ-PL":"","Quora-PL":77.61,"SCIDOCS-PL":1.4,"SciFact-PL":32.24,"TRECCOVID-PL":24.53} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":47.32,"ArguAna-PL":53.02,"ArguAna-PL (pol-Latn)":52.99,"DBPedia-PL":35.82,"FiQA-PL":33.0,"FiQA-PL (pol-Latn)":32.97,"HotpotQA-PL":67.41,"MSMARCO-PL":33.38,"NFCorpus-PL":30.24,"NFCorpus-PL (pol-Latn)":30.21,"NQ-PL":52.79,"Quora-PL":83.65,"SCIDOCS-PL":13.81,"SCIDOCS-PL (pol-Latn)":13.82,"SciFact-PL":65.66,"SciFact-PL (pol-Latn)":65.66,"TRECCOVID-PL":70.03,"TRECCOVID-PL (pol-Latn)":69.9} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.3,"ArguAna-PL":42.81,"ArguAna-PL (pol-Latn)":42.86,"DBPedia-PL":30.23,"FiQA-PL":25.52,"FiQA-PL (pol-Latn)":25.59,"HotpotQA-PL":63.52,"MSMARCO-PL":29.52,"NFCorpus-PL":25.98,"NFCorpus-PL (pol-Latn)":25.99,"NQ-PL":44.8,"Quora-PL":81.22,"SCIDOCS-PL":12.35,"SCIDOCS-PL (pol-Latn)":12.36,"SciFact-PL":62.11,"SciFact-PL (pol-Latn)":62.26,"TRECCOVID-PL":66.06,"TRECCOVID-PL (pol-Latn)":65.94} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":41.06,"ArguAna-PL":37.43,"ArguAna-PL (pol-Latn)":37.49,"DBPedia-PL":29.27,"FiQA-PL":22.03,"FiQA-PL (pol-Latn)":22.02,"HotpotQA-PL":60.15,"MSMARCO-PL":26.94,"NFCorpus-PL":26.48,"NFCorpus-PL (pol-Latn)":26.5,"NQ-PL":40.46,"Quora-PL":78.7,"SCIDOCS-PL":11.6,"SCIDOCS-PL (pol-Latn)":11.59,"SciFact-PL":62.76,"SciFact-PL (pol-Latn)":62.76,"TRECCOVID-PL":70.92,"TRECCOVID-PL (pol-Latn)":70.92} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":28.51,"ArguAna-PL":42.62,"ArguAna-PL (pol-Latn)":42.61,"DBPedia-PL":20.18,"FiQA-PL":14.68,"FiQA-PL (pol-Latn)":14.71,"HotpotQA-PL":29.36,"MSMARCO-PL":12.45,"NFCorpus-PL":18.53,"NFCorpus-PL (pol-Latn)":18.54,"NQ-PL":15.64,"Quora-PL":79.18,"SCIDOCS-PL":11.18,"SCIDOCS-PL (pol-Latn)":11.17,"SciFact-PL":41.53,"SciFact-PL (pol-Latn)":41.55,"TRECCOVID-PL":35.38,"TRECCOVID-PL (pol-Latn)":35.43} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.21,"ArguAna-PL":37.83,"ArguAna-PL (pol-Latn)":37.86,"DBPedia-PL":18.0,"FiQA-PL":12.49,"FiQA-PL (pol-Latn)":12.49,"HotpotQA-PL":22.76,"MSMARCO-PL":10.39,"NFCorpus-PL":17.16,"NFCorpus-PL (pol-Latn)":17.17,"NQ-PL":12.56,"Quora-PL":77.18,"SCIDOCS-PL":10.26,"SCIDOCS-PL (pol-Latn)":10.26,"SciFact-PL":40.24,"SciFact-PL (pol-Latn)":40.24,"TRECCOVID-PL":34.38,"TRECCOVID-PL (pol-Latn)":34.23} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":22.73,"ArguAna-PL":38.52,"ArguAna-PL (pol-Latn)":38.56,"DBPedia-PL":16.1,"FiQA-PL":7.63,"FiQA-PL (pol-Latn)":7.66,"HotpotQA-PL":19.72,"MSMARCO-PL":7.22,"NFCorpus-PL":17.45,"NFCorpus-PL (pol-Latn)":17.45,"NQ-PL":9.65,"Quora-PL":74.96,"SCIDOCS-PL":7.48,"SCIDOCS-PL (pol-Latn)":7.47,"SciFact-PL":39.79,"SciFact-PL (pol-Latn)":39.79,"TRECCOVID-PL":18.45,"TRECCOVID-PL (pol-Latn)":18.51} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL 
(pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":48.89,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":38.04,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":32.88,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":18.39,"SciFact-PL":null,"SciFact-PL (pol-Latn)":73.22,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":58.01} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":41.97,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":24.07,"FiQA-PL":24.25,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":43.41,"MSMARCO-PL":51.56,"NFCorpus-PL":25.95,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":35.09,"Quora-PL":78.86,"SCIDOCS-PL":11.0,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":51.92,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":42.64,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":44.12,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":26.32,"FiQA-PL":24.95,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":45.13,"MSMARCO-PL":25.47,"NFCorpus-PL":28.55,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":37.9,"Quora-PL":77.98,"SCIDOCS-PL":10.9,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":54.44,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":46.98,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":49.42,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":19.82,"FiQA-PL":19.58,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":23.47,"MSMARCO-PL":16.51,"NFCorpus-PL":22.49,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":19.83,"Quora-PL":81.17,"SCIDOCS-PL":12.15,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":49.49,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":38.97,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":51.87,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":24.59,"FiQA-PL":22.27,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":32.11,"MSMARCO-PL":17.91,"NFCorpus-PL":24.05,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":23.54,"Quora-PL":81.49,"SCIDOCS-PL":13.23,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":52.51,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":35.23,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":13.4,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":5.82,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":15.43,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":5.34,"SciFact-PL":null,"SciFact-PL (pol-Latn)":22.48,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":16.52} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"ArguAna-PL":null,"ArguAna-PL 
(pol-Latn)":11.5,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":2.29,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":10.62,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":3.75,"SciFact-PL":null,"SciFact-PL (pol-Latn)":16.14,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":8.66} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":14.72,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":3.6,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":8.77,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":4.02,"SciFact-PL":null,"SciFact-PL (pol-Latn)":13.31,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":12.12} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"ArguAna-PL":36.7,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":12.36,"FiQA-PL":8.02,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":20.83,"MSMARCO-PL":4.57,"NFCorpus-PL":16.28,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":5.85,"Quora-PL":71.95,"SCIDOCS-PL":6.5,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":33.03,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":16.91,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL 
(pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} diff --git a/all_data_tasks/32/default.jsonl b/all_data_tasks/32/default.jsonl index 39702d671ddef13478f8fa88bb6d8cea8e05b749..f4a3027693db50d2941d31560e89be8930cce1dc 100644 --- a/all_data_tasks/32/default.jsonl +++ b/all_data_tasks/32/default.jsonl @@ -1,51 +1,23 @@ -{"index":2,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":70.86,"CDSC-R":92.66,"SICK-R-PL":78.89,"STS22 (pl)":41.02} -{"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.86,"CDSC-R":92.66,"SICK-R-PL":78.89,"STS22 (pl)":41.02} -{"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.86,"CDSC-R":92.66,"SICK-R-PL":78.89,"STS22 (pl)":41.02} -{"index":38,"Rank":4,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.7,"CDSC-R":92.54,"SICK-R-PL":79.2,"STS22 (pl)":40.36} -{"index":3,"Rank":5,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.64,"CDSC-R":90.97,"SICK-R-PL":78.16,"STS22 (pl)":42.79} -{"index":39,"Rank":6,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.59,"CDSC-R":92.54,"SICK-R-PL":79.91,"STS22 (pl)":39.32} -{"index":35,"Rank":7,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.13,"CDSC-R":93.26,"SICK-R-PL":76.77,"STS22 (pl)":40.36} -{"index":36,"Rank":8,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.98,"CDSC-R":93.5,"SICK-R-PL":76.04,"STS22 (pl)":40.4} -{"index":19,"Rank":9,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.47,"CDSC-R":92.13,"SICK-R-PL":79.51,"STS22 (pl)":36.78} -{"index":40,"Rank":10,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":68.78,"CDSC-R":89.62,"SICK-R-PL":76.37,"STS22 (pl)":40.36} 
-{"index":37,"Rank":11,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.77,"CDSC-R":91.98,"SICK-R-PL":73.71,"STS22 (pl)":40.63} -{"index":1,"Rank":12,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.12,"CDSC-R":89.74,"SICK-R-PL":73.78,"STS22 (pl)":40.83} -{"index":33,"Rank":13,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.12,"CDSC-R":89.74,"SICK-R-PL":73.78,"STS22 (pl)":40.83} -{"index":41,"Rank":14,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":67.36,"CDSC-R":88.55,"SICK-R-PL":76.18,"STS22 (pl)":37.34} -{"index":26,"Rank":15,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":66.91,"CDSC-R":91.0,"SICK-R-PL":75.08,"STS22 (pl)":34.66} -{"index":52,"Rank":16,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.19,"CDSC-R":88.8,"SICK-R-PL":73.13,"STS22 (pl)":33.64} -{"index":28,"Rank":17,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.18,"CDSC-R":90.27,"SICK-R-PL":69.46,"STS22 (pl)":35.8} -{"index":25,"Rank":18,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.13,"CDSC-R":90.08,"SICK-R-PL":71.23,"STS22 (pl)":34.07} -{"index":30,"Rank":19,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":65.01,"CDSC-R":89.09,"SICK-R-PL":67.26,"STS22 (pl)":38.69} -{"index":51,"Rank":20,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":63.83,"CDSC-R":88.98,"SICK-R-PL":68.77,"STS22 (pl)":33.73} -{"index":42,"Rank":21,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":63.57,"CDSC-R":85.53,"SICK-R-PL":65.9,"STS22 (pl)":39.28} -{"index":29,"Rank":22,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":63.53,"CDSC-R":86.18,"SICK-R-PL":64.67,"STS22 (pl)":39.73} -{"index":48,"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":62.59,"CDSC-R":87.67,"SICK-R-PL":65.53,"STS22 (pl)":34.58} -{"index":23,"Rank":24,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.02,"CDSC-R":85.77,"SICK-R-PL":62.98,"STS22 (pl)":25.31} -{"index":0,"Rank":25,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":18.34} -{"index":8,"Rank":30,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.46,"SICK-R-PL":0.43,"STS22 (pl)":""} -{"index":9,"Rank":31,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.41,"SICK-R-PL":0.32,"STS22 (pl)":""} -{"index":10,"Rank":32,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.57,"SICK-R-PL":0.39,"STS22 (pl)":""} 
-{"index":11,"Rank":33,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.62,"SICK-R-PL":0.42,"STS22 (pl)":""} -{"index":12,"Rank":34,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.3,"SICK-R-PL":0.28,"STS22 (pl)":""} -{"index":13,"Rank":35,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.65,"SICK-R-PL":0.55,"STS22 (pl)":""} -{"index":15,"Rank":37,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":35.38} -{"index":16,"Rank":38,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":36.37} -{"index":17,"Rank":39,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":23.31} -{"index":18,"Rank":40,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":14.91} -{"index":20,"Rank":41,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":42.08} -{"index":21,"Rank":42,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":31.13} -{"index":24,"Rank":43,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":39.21} -{"index":31,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":27.98} -{"index":32,"Rank":46,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":15.06} -{"index":43,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":19.22} -{"index":44,"Rank":48,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":26.77} -{"index":46,"Rank":50,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":13.56} -{"index":47,"Rank":51,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":12.47} -{"index":49,"Rank":52,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":30.68} -{"index":50,"Rank":53,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":33.74} -{"index":53,"Rank":54,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":24.42} 
-{"index":54,"Rank":55,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":25.0} -{"index":55,"Rank":56,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":27.04} -{"index":56,"Rank":57,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":9.71} -{"index":57,"Rank":58,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":34.81} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":83.04,"CDSC-R":91.0,"CDSC-R (pol-Latn)":91.0,"SICK-R-PL":75.08,"SICK-R-PL (pol-Latn)":75.08} +{"Rank":2,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.96,"CDSC-R":88.8,"CDSC-R (pol-Latn)":88.8,"SICK-R-PL":73.13,"SICK-R-PL (pol-Latn)":73.13} +{"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.66,"CDSC-R":90.08,"CDSC-R (pol-Latn)":90.09,"SICK-R-PL":71.23,"SICK-R-PL (pol-Latn)":71.23} +{"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":79.86,"CDSC-R":90.27,"CDSC-R (pol-Latn)":90.27,"SICK-R-PL":69.46,"SICK-R-PL (pol-Latn)":69.45} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":78.88,"CDSC-R":88.98,"CDSC-R (pol-Latn)":88.98,"SICK-R-PL":68.77,"SICK-R-PL (pol-Latn)":68.77} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":75.72,"CDSC-R":85.53,"CDSC-R (pol-Latn)":85.53,"SICK-R-PL":65.9,"SICK-R-PL (pol-Latn)":65.9} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":92.23,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":72.78} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":86.18,"CDSC-R (pol-Latn)":null,"SICK-R-PL":64.67,"SICK-R-PL (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":89.09,"CDSC-R (pol-Latn)":null,"SICK-R-PL":67.26,"SICK-R-PL (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":89.62,"CDSC-R (pol-Latn)":null,"SICK-R-PL":76.37,"SICK-R-PL (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":88.55,"CDSC-R (pol-Latn)":null,"SICK-R-PL":76.18,"SICK-R-PL (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CDSC-R":null,"CDSC-R 
(pol-Latn)":82.5,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":54.26} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":79.45,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":52.43} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":77.04,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":50.2} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-R":87.67,"CDSC-R (pol-Latn)":null,"SICK-R-PL":65.53,"SICK-R-PL (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} diff --git a/all_data_tasks/33/default.jsonl b/all_data_tasks/33/default.jsonl index a9d1ab184341508fa2d07f84f5c7a91f2a7dcae7..d2efe60de93c56eeca8f8b7fc5b011495104b438 100644 --- a/all_data_tasks/33/default.jsonl +++ b/all_data_tasks/33/default.jsonl @@ -1,47 +1,32 @@ -{"index":13,"Rank":1,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97} -{"index":12,"Rank":2,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25} -{"index":24,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} -{"index":30,"Rank":4,"Model":"norbert3-base<\/a>","Model 
Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19} -{"index":23,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} -{"index":17,"Rank":6,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":61.34,"MassiveIntentClassification (nb)":57.57,"MassiveScenarioClassification (nb)":63.66,"NoRecClassification":50.46,"NordicLangClassification":75.98,"NorwegianParliament":57.66,"ScalaNbClassification":62.69} -{"index":31,"Rank":7,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79} -{"index":26,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} -{"index":16,"Rank":9,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95} -{"index":45,"Rank":10,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99} -{"index":18,"Rank":11,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18} -{"index":46,"Rank":12,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33} -{"index":19,"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18} -{"index":20,"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification 
(nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13} -{"index":8,"Rank":15,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34} -{"index":22,"Rank":16,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03} -{"index":6,"Rank":17,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63} -{"index":34,"Rank":18,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} -{"index":29,"Rank":19,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28} -{"index":7,"Rank":20,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41} -{"index":0,"Rank":21,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":1,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""} -{"index":2,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":3,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification 
(nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":4,"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":5,"Rank":26,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""} -{"index":9,"Rank":27,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":10,"Rank":28,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":11,"Rank":29,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":14,"Rank":30,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification (nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":15,"Rank":31,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":21,"Rank":32,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":25,"Rank":33,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":27,"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":28,"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":32,"Rank":36,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":33,"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":35,"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":36,"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":37,"Rank":40,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":38,"Rank":41,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":39,"Rank":42,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":40,"Rank":43,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":41,"Rank":44,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":42,"Rank":45,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} 
-{"index":43,"Rank":46,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"index":44,"Rank":47,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":65.06,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NoRecClassification (nob-Latn)":58.43,"NordicLangClassification":82.29,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":80.15,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":62.42,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NoRecClassification (nob-Latn)":53.74,"NordicLangClassification":75.94,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":75.85,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.43,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NoRecClassification (nob-Latn)":50.08,"NordicLangClassification":75.15,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":72.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.04,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NoRecClassification (nob-Latn)":37.93,"NordicLangClassification":54.71,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.7,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} +{"Rank":5,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":6,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":52.05,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":63.6,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":7,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification 
(nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NoRecClassification (nob-Latn)":"","NordicLangClassification":62.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.56,"ScalaNbClassification":53.63} +{"Rank":8,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NoRecClassification (nob-Latn)":"","NordicLangClassification":44.53,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":52.44,"ScalaNbClassification":52.41} +{"Rank":9,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NoRecClassification (nob-Latn)":"","NordicLangClassification":51.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":55.74,"ScalaNbClassification":50.34} +{"Rank":10,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.69,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.41,"ScalaNbClassification":62.25} +{"Rank":11,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":85.27,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":62.58,"ScalaNbClassification":66.97} +{"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NoRecClassification (nob-Latn)":"","NordicLangClassification":77.68,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.78,"ScalaNbClassification":58.95} +{"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NoRecClassification (nob-Latn)":"","NordicLangClassification":59.34,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.42,"ScalaNbClassification":50.18} +{"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NoRecClassification (nob-Latn)":"","NordicLangClassification":58.3,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.26,"ScalaNbClassification":50.13} +{"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory 
Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NoRecClassification (nob-Latn)":"","NordicLangClassification":53.47,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.57,"ScalaNbClassification":50.03} +{"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NoRecClassification (nob-Latn)":"","NordicLangClassification":57.82,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":53.25,"ScalaNbClassification":75.28} +{"Rank":17,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NoRecClassification (nob-Latn)":"","NordicLangClassification":82.67,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":59.33,"ScalaNbClassification":60.19} +{"Rank":18,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.85,"ScalaNbClassification":66.79} +{"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NoRecClassification (nob-Latn)":45.45,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":35.39,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NoRecClassification (nob-Latn)":37.73,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.17,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":38.34,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":50.15,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":22,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size 
(Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":25,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":26,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":46.7,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":42.52,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":50.32,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":41.57,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":28,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":30,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":31,"Model":"DanskBERT<\/a>","Model Size (Million 
Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NoRecClassification (nob-Latn)":"","NordicLangClassification":74.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.79,"ScalaNbClassification":59.99} +{"Rank":32,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NoRecClassification (nob-Latn)":"","NordicLangClassification":79.39,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.75,"ScalaNbClassification":58.33} diff --git a/all_data_tasks/34/default.jsonl b/all_data_tasks/34/default.jsonl index c06fa30f294e52b07f25d7f6c6f1923b938f13af..d0d84bec7f740f26c57813b82231cb3076a39437 100644 --- a/all_data_tasks/34/default.jsonl +++ b/all_data_tasks/34/default.jsonl @@ -1,128 +1,56 @@ -{"index":70,"Rank":1,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.36,"AmazonCounterfactualClassification (de)":66.71,"AmazonCounterfactualClassification (ja)":76.05,"AmazonReviewsClassification (de)":53.0,"AmazonReviewsClassification (es)":48.81,"AmazonReviewsClassification (fr)":48.51,"AmazonReviewsClassification (ja)":47.71,"AmazonReviewsClassification (zh)":44.66,"MTOPDomainClassification (de)":92.68,"MTOPDomainClassification (es)":92.27,"MTOPDomainClassification (fr)":89.97,"MTOPDomainClassification (hi)":90.03,"MTOPDomainClassification (th)":87.56,"MTOPIntentClassification (de)":79.37,"MTOPIntentClassification (es)":80.34,"MTOPIntentClassification (fr)":76.72,"MTOPIntentClassification (hi)":77.87,"MTOPIntentClassification (th)":77.74,"MassiveIntentClassification (af)":67.66,"MassiveIntentClassification (am)":60.71,"MassiveIntentClassification (ar)":63.61,"MassiveIntentClassification (az)":69.0,"MassiveIntentClassification (bn)":68.54,"MassiveIntentClassification (cy)":63.03,"MassiveIntentClassification (de)":72.47,"MassiveIntentClassification (el)":71.24,"MassiveIntentClassification (es)":72.82,"MassiveIntentClassification (fa)":74.25,"MassiveIntentClassification (fi)":72.29,"MassiveIntentClassification (fr)":73.32,"MassiveIntentClassification (he)":70.22,"MassiveIntentClassification (hi)":71.58,"MassiveIntentClassification (hu)":71.92,"MassiveIntentClassification (hy)":68.07,"MassiveIntentClassification (id)":72.62,"MassiveIntentClassification (is)":65.77,"MassiveIntentClassification (it)":73.45,"MassiveIntentClassification (ja)":74.69,"MassiveIntentClassification (jv)":63.04,"MassiveIntentClassification (ka)":58.91,"MassiveIntentClassification (km)":54.43,"MassiveIntentClassification (kn)":66.33,"MassiveIntentClassification (ko)":70.59,"MassiveIntentClassification (lv)":69.11,"MassiveIntentClassification (ml)":69.7,"MassiveIntentClassification (mn)":66.44,"MassiveIntentClassification (ms)":70.8,"MassiveIntentClassification (my)":64.79,"MassiveIntentClassification (nl)":74.43,"MassiveIntentClassification (pt)":73.63,"MassiveIntentClassification (ro)":71.89,"MassiveIntentClassification (ru)":74.16,"MassiveIntentClassification (sl)":69.96,"MassiveIntentClassification (sq)":69.5,"MassiveIntentClassification (sw)":63.01,"MassiveIntentClassification (ta)":66.91,"MassiveIntentClassification 
(te)":67.62,"MassiveIntentClassification (th)":69.51,"MassiveIntentClassification (tl)":69.31,"MassiveIntentClassification (tr)":72.24,"MassiveIntentClassification (ur)":67.5,"MassiveIntentClassification (vi)":71.29,"MassiveIntentClassification (zh-TW)":69.38,"MassiveScenarioClassification (af)":73.34,"MassiveScenarioClassification (am)":65.84,"MassiveScenarioClassification (ar)":69.76,"MassiveScenarioClassification (az)":72.02,"MassiveScenarioClassification (bn)":72.76,"MassiveScenarioClassification (cy)":68.02,"MassiveScenarioClassification (de)":77.68,"MassiveScenarioClassification (el)":76.13,"MassiveScenarioClassification (es)":76.97,"MassiveScenarioClassification (fa)":78.1,"MassiveScenarioClassification (fi)":75.21,"MassiveScenarioClassification (fr)":77.07,"MassiveScenarioClassification (he)":73.53,"MassiveScenarioClassification (hi)":75.75,"MassiveScenarioClassification (hu)":77.09,"MassiveScenarioClassification (hy)":71.08,"MassiveScenarioClassification (id)":77.1,"MassiveScenarioClassification (is)":71.26,"MassiveScenarioClassification (it)":77.08,"MassiveScenarioClassification (ja)":79.35,"MassiveScenarioClassification (jv)":68.42,"MassiveScenarioClassification (ka)":66.16,"MassiveScenarioClassification (km)":60.11,"MassiveScenarioClassification (kn)":71.25,"MassiveScenarioClassification (ko)":76.46,"MassiveScenarioClassification (lv)":73.25,"MassiveScenarioClassification (ml)":74.12,"MassiveScenarioClassification (mn)":70.02,"MassiveScenarioClassification (ms)":74.41,"MassiveScenarioClassification (my)":68.4,"MassiveScenarioClassification (nl)":78.52,"MassiveScenarioClassification (pt)":77.12,"MassiveScenarioClassification (ro)":75.26,"MassiveScenarioClassification (ru)":77.71,"MassiveScenarioClassification (sl)":74.84,"MassiveScenarioClassification (sq)":74.72,"MassiveScenarioClassification (sw)":67.92,"MassiveScenarioClassification (ta)":70.93,"MassiveScenarioClassification (te)":72.41,"MassiveScenarioClassification (th)":75.18,"MassiveScenarioClassification (tl)":72.86,"MassiveScenarioClassification (tr)":76.47,"MassiveScenarioClassification (ur)":71.89,"MassiveScenarioClassification (vi)":74.75,"MassiveScenarioClassification (zh-TW)":75.46} -{"index":69,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":67.48,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.91,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":86.41,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification 
(he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification (mn)":61.71,"MassiveIntentClassification (ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification (my)":63.25,"MassiveScenarioClassification (nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81} -{"index":67,"Rank":3,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":66.64,"AmazonCounterfactualClassification (de)":74.05,"AmazonCounterfactualClassification (ja)":77.22,"AmazonReviewsClassification 
(de)":53.26,"AmazonReviewsClassification (es)":50.33,"AmazonReviewsClassification (fr)":36.71,"AmazonReviewsClassification (ja)":48.69,"AmazonReviewsClassification (zh)":46.24,"MTOPDomainClassification (de)":92.98,"MTOPDomainClassification (es)":93.37,"MTOPDomainClassification (fr)":74.8,"MTOPDomainClassification (hi)":88.81,"MTOPDomainClassification (th)":85.52,"MTOPIntentClassification (de)":77.77,"MTOPIntentClassification (es)":79.94,"MTOPIntentClassification (fr)":53.97,"MTOPIntentClassification (hi)":72.91,"MTOPIntentClassification (th)":73.24,"MassiveIntentClassification (af)":66.48,"MassiveIntentClassification (am)":44.29,"MassiveIntentClassification (ar)":63.17,"MassiveIntentClassification (az)":64.23,"MassiveIntentClassification (bn)":64.94,"MassiveIntentClassification (cy)":55.48,"MassiveIntentClassification (de)":74.09,"MassiveIntentClassification (el)":68.31,"MassiveIntentClassification (es)":75.09,"MassiveIntentClassification (fa)":72.21,"MassiveIntentClassification (fi)":68.74,"MassiveIntentClassification (fr)":46.39,"MassiveIntentClassification (he)":66.22,"MassiveIntentClassification (hi)":69.45,"MassiveIntentClassification (hu)":69.41,"MassiveIntentClassification (hy)":56.92,"MassiveIntentClassification (id)":72.71,"MassiveIntentClassification (is)":59.91,"MassiveIntentClassification (it)":75.25,"MassiveIntentClassification (ja)":76.36,"MassiveIntentClassification (jv)":57.92,"MassiveIntentClassification (ka)":52.55,"MassiveIntentClassification (km)":46.45,"MassiveIntentClassification (kn)":53.96,"MassiveIntentClassification (ko)":74.21,"MassiveIntentClassification (lv)":59.23,"MassiveIntentClassification (ml)":51.45,"MassiveIntentClassification (mn)":51.38,"MassiveIntentClassification (ms)":69.85,"MassiveIntentClassification (my)":49.15,"MassiveIntentClassification (nl)":74.83,"MassiveIntentClassification (pt)":75.27,"MassiveIntentClassification (ro)":69.63,"MassiveIntentClassification (ru)":76.63,"MassiveIntentClassification (sl)":67.15,"MassiveIntentClassification (sq)":58.84,"MassiveIntentClassification (sw)":57.37,"MassiveIntentClassification (ta)":53.15,"MassiveIntentClassification (te)":51.51,"MassiveIntentClassification (th)":66.91,"MassiveIntentClassification (tl)":68.73,"MassiveIntentClassification (tr)":72.07,"MassiveIntentClassification (ur)":62.09,"MassiveIntentClassification (vi)":71.17,"MassiveIntentClassification (zh-TW)":71.14,"MassiveScenarioClassification (af)":73.37,"MassiveScenarioClassification (am)":47.21,"MassiveScenarioClassification (ar)":69.84,"MassiveScenarioClassification (az)":67.0,"MassiveScenarioClassification (bn)":68.05,"MassiveScenarioClassification (cy)":61.88,"MassiveScenarioClassification (de)":79.03,"MassiveScenarioClassification (el)":72.97,"MassiveScenarioClassification (es)":78.84,"MassiveScenarioClassification (fa)":76.74,"MassiveScenarioClassification (fi)":71.22,"MassiveScenarioClassification (fr)":53.86,"MassiveScenarioClassification (he)":69.64,"MassiveScenarioClassification (hi)":73.51,"MassiveScenarioClassification (hu)":74.06,"MassiveScenarioClassification (hy)":59.55,"MassiveScenarioClassification (id)":77.41,"MassiveScenarioClassification (is)":66.58,"MassiveScenarioClassification (it)":78.39,"MassiveScenarioClassification (ja)":79.62,"MassiveScenarioClassification (jv)":64.29,"MassiveScenarioClassification (ka)":57.52,"MassiveScenarioClassification (km)":52.42,"MassiveScenarioClassification (kn)":58.55,"MassiveScenarioClassification (ko)":78.89,"MassiveScenarioClassification (lv)":63.5,"MassiveScenarioClassification 
(ml)":54.03,"MassiveScenarioClassification (mn)":54.24,"MassiveScenarioClassification (ms)":75.53,"MassiveScenarioClassification (my)":52.19,"MassiveScenarioClassification (nl)":78.48,"MassiveScenarioClassification (pt)":77.96,"MassiveScenarioClassification (ro)":73.19,"MassiveScenarioClassification (ru)":80.52,"MassiveScenarioClassification (sl)":73.66,"MassiveScenarioClassification (sq)":64.03,"MassiveScenarioClassification (sw)":64.66,"MassiveScenarioClassification (ta)":57.76,"MassiveScenarioClassification (te)":57.27,"MassiveScenarioClassification (th)":72.46,"MassiveScenarioClassification (tl)":73.71,"MassiveScenarioClassification (tr)":75.04,"MassiveScenarioClassification (ur)":67.05,"MassiveScenarioClassification (vi)":75.52,"MassiveScenarioClassification (zh-TW)":76.87} -{"index":68,"Rank":4,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.32,"AmazonCounterfactualClassification (de)":71.72,"AmazonCounterfactualClassification (ja)":73.33,"AmazonReviewsClassification (de)":41.83,"AmazonReviewsClassification (es)":40.53,"AmazonReviewsClassification (fr)":40.94,"AmazonReviewsClassification (ja)":37.44,"AmazonReviewsClassification (zh)":37.23,"MTOPDomainClassification (de)":89.63,"MTOPDomainClassification (es)":90.59,"MTOPDomainClassification (fr)":84.79,"MTOPDomainClassification (hi)":87.32,"MTOPDomainClassification (th)":86.24,"MTOPIntentClassification (de)":71.23,"MTOPIntentClassification (es)":71.27,"MTOPIntentClassification (fr)":55.51,"MTOPIntentClassification (hi)":69.24,"MTOPIntentClassification (th)":71.71,"MassiveIntentClassification (af)":59.98,"MassiveIntentClassification (am)":53.66,"MassiveIntentClassification (ar)":57.46,"MassiveIntentClassification (az)":62.59,"MassiveIntentClassification (bn)":61.13,"MassiveIntentClassification (cy)":50.06,"MassiveIntentClassification (de)":66.09,"MassiveIntentClassification (el)":64.68,"MassiveIntentClassification (es)":68.4,"MassiveIntentClassification (fa)":67.25,"MassiveIntentClassification (fi)":65.78,"MassiveIntentClassification (fr)":67.95,"MassiveIntentClassification (he)":62.05,"MassiveIntentClassification (hi)":64.95,"MassiveIntentClassification (hu)":64.97,"MassiveIntentClassification (hy)":60.08,"MassiveIntentClassification (id)":66.64,"MassiveIntentClassification (is)":56.39,"MassiveIntentClassification (it)":68.93,"MassiveIntentClassification (ja)":68.94,"MassiveIntentClassification (jv)":54.26,"MassiveIntentClassification (ka)":48.99,"MassiveIntentClassification (km)":44.69,"MassiveIntentClassification (kn)":59.19,"MassiveIntentClassification (ko)":66.34,"MassiveIntentClassification (lv)":60.34,"MassiveIntentClassification (ml)":63.09,"MassiveIntentClassification (mn)":58.76,"MassiveIntentClassification (ms)":62.48,"MassiveIntentClassification (my)":58.56,"MassiveIntentClassification (nl)":67.3,"MassiveIntentClassification (pt)":68.98,"MassiveIntentClassification (ro)":65.54,"MassiveIntentClassification (ru)":69.02,"MassiveIntentClassification (sl)":62.35,"MassiveIntentClassification (sq)":61.23,"MassiveIntentClassification (sw)":56.0,"MassiveIntentClassification (ta)":58.71,"MassiveIntentClassification (te)":59.72,"MassiveIntentClassification (th)":65.6,"MassiveIntentClassification (tl)":60.86,"MassiveIntentClassification (tr)":67.41,"MassiveIntentClassification (ur)":61.52,"MassiveIntentClassification (vi)":66.17,"MassiveIntentClassification (zh-TW)":64.65,"MassiveScenarioClassification (af)":65.09,"MassiveScenarioClassification 
(am)":58.52,"MassiveScenarioClassification (ar)":62.24,"MassiveScenarioClassification (az)":63.75,"MassiveScenarioClassification (bn)":65.0,"MassiveScenarioClassification (cy)":52.84,"MassiveScenarioClassification (de)":71.95,"MassiveScenarioClassification (el)":70.18,"MassiveScenarioClassification (es)":71.5,"MassiveScenarioClassification (fa)":70.25,"MassiveScenarioClassification (fi)":69.13,"MassiveScenarioClassification (fr)":71.89,"MassiveScenarioClassification (he)":67.44,"MassiveScenarioClassification (hi)":69.16,"MassiveScenarioClassification (hu)":70.75,"MassiveScenarioClassification (hy)":63.14,"MassiveScenarioClassification (id)":70.7,"MassiveScenarioClassification (is)":60.94,"MassiveScenarioClassification (it)":72.32,"MassiveScenarioClassification (ja)":74.65,"MassiveScenarioClassification (jv)":59.69,"MassiveScenarioClassification (ka)":54.37,"MassiveScenarioClassification (km)":48.31,"MassiveScenarioClassification (kn)":62.15,"MassiveScenarioClassification (ko)":72.45,"MassiveScenarioClassification (lv)":62.81,"MassiveScenarioClassification (ml)":68.04,"MassiveScenarioClassification (mn)":61.44,"MassiveScenarioClassification (ms)":66.9,"MassiveScenarioClassification (my)":61.64,"MassiveScenarioClassification (nl)":72.11,"MassiveScenarioClassification (pt)":70.83,"MassiveScenarioClassification (ro)":69.19,"MassiveScenarioClassification (ru)":72.99,"MassiveScenarioClassification (sl)":65.26,"MassiveScenarioClassification (sq)":66.49,"MassiveScenarioClassification (sw)":59.89,"MassiveScenarioClassification (ta)":62.38,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":71.61,"MassiveScenarioClassification (tl)":62.74,"MassiveScenarioClassification (tr)":71.67,"MassiveScenarioClassification (ur)":64.64,"MassiveScenarioClassification (vi)":70.01,"MassiveScenarioClassification (zh-TW)":70.69} -{"index":55,"Rank":5,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.11,"AmazonCounterfactualClassification (de)":72.02,"AmazonCounterfactualClassification (ja)":71.79,"AmazonReviewsClassification (de)":34.61,"AmazonReviewsClassification (es)":35.17,"AmazonReviewsClassification (fr)":34.91,"AmazonReviewsClassification (ja)":31.84,"AmazonReviewsClassification (zh)":31.91,"MTOPDomainClassification (de)":89.54,"MTOPDomainClassification (es)":90.62,"MTOPDomainClassification (fr)":86.19,"MTOPDomainClassification (hi)":89.42,"MTOPDomainClassification (th)":85.9,"MTOPIntentClassification (de)":73.43,"MTOPIntentClassification (es)":73.84,"MTOPIntentClassification (fr)":66.75,"MTOPIntentClassification (hi)":69.14,"MTOPIntentClassification (th)":68.47,"MassiveIntentClassification (af)":58.15,"MassiveIntentClassification (am)":57.91,"MassiveIntentClassification (ar)":57.23,"MassiveIntentClassification (az)":57.17,"MassiveIntentClassification (bn)":62.24,"MassiveIntentClassification (cy)":51.19,"MassiveIntentClassification (de)":61.31,"MassiveIntentClassification (el)":64.21,"MassiveIntentClassification (es)":63.97,"MassiveIntentClassification (fa)":66.67,"MassiveIntentClassification (fi)":62.86,"MassiveIntentClassification (fr)":58.55,"MassiveIntentClassification (he)":63.45,"MassiveIntentClassification (hi)":61.32,"MassiveIntentClassification (hu)":62.91,"MassiveIntentClassification (hy)":60.03,"MassiveIntentClassification (id)":63.29,"MassiveIntentClassification (is)":56.02,"MassiveIntentClassification (it)":64.44,"MassiveIntentClassification (ja)":63.7,"MassiveIntentClassification (jv)":52.0,"MassiveIntentClassification 
(ka)":54.09,"MassiveIntentClassification (km)":43.34,"MassiveIntentClassification (kn)":57.87,"MassiveIntentClassification (ko)":62.69,"MassiveIntentClassification (lv)":56.24,"MassiveIntentClassification (ml)":62.81,"MassiveIntentClassification (mn)":58.49,"MassiveIntentClassification (ms)":61.56,"MassiveIntentClassification (my)":59.4,"MassiveIntentClassification (nl)":64.56,"MassiveIntentClassification (pt)":63.49,"MassiveIntentClassification (ro)":62.53,"MassiveIntentClassification (ru)":62.63,"MassiveIntentClassification (sl)":63.43,"MassiveIntentClassification (sq)":61.45,"MassiveIntentClassification (sw)":56.25,"MassiveIntentClassification (ta)":59.75,"MassiveIntentClassification (te)":59.61,"MassiveIntentClassification (th)":59.42,"MassiveIntentClassification (tl)":58.12,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":59.49,"MassiveIntentClassification (vi)":60.48,"MassiveIntentClassification (zh-TW)":56.73,"MassiveScenarioClassification (af)":64.06,"MassiveScenarioClassification (am)":63.24,"MassiveScenarioClassification (ar)":63.69,"MassiveScenarioClassification (az)":60.86,"MassiveScenarioClassification (bn)":67.17,"MassiveScenarioClassification (cy)":56.52,"MassiveScenarioClassification (de)":67.48,"MassiveScenarioClassification (el)":70.23,"MassiveScenarioClassification (es)":69.08,"MassiveScenarioClassification (fa)":72.1,"MassiveScenarioClassification (fi)":67.16,"MassiveScenarioClassification (fr)":63.02,"MassiveScenarioClassification (he)":68.83,"MassiveScenarioClassification (hi)":66.9,"MassiveScenarioClassification (hu)":69.33,"MassiveScenarioClassification (hy)":65.82,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":63.14,"MassiveScenarioClassification (it)":70.04,"MassiveScenarioClassification (ja)":70.68,"MassiveScenarioClassification (jv)":59.79,"MassiveScenarioClassification (ka)":61.03,"MassiveScenarioClassification (km)":49.05,"MassiveScenarioClassification (kn)":63.78,"MassiveScenarioClassification (ko)":69.6,"MassiveScenarioClassification (lv)":59.97,"MassiveScenarioClassification (ml)":69.2,"MassiveScenarioClassification (mn)":62.72,"MassiveScenarioClassification (ms)":67.87,"MassiveScenarioClassification (my)":64.98,"MassiveScenarioClassification (nl)":69.8,"MassiveScenarioClassification (pt)":67.5,"MassiveScenarioClassification (ro)":67.53,"MassiveScenarioClassification (ru)":67.96,"MassiveScenarioClassification (sl)":69.57,"MassiveScenarioClassification (sq)":68.48,"MassiveScenarioClassification (sw)":63.18,"MassiveScenarioClassification (ta)":64.85,"MassiveScenarioClassification (te)":65.39,"MassiveScenarioClassification (th)":67.99,"MassiveScenarioClassification (tl)":63.4,"MassiveScenarioClassification (tr)":65.77,"MassiveScenarioClassification (ur)":65.81,"MassiveScenarioClassification (vi)":66.52,"MassiveScenarioClassification (zh-TW)":63.3} -{"index":71,"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.63,"AmazonCounterfactualClassification (de)":71.65,"AmazonCounterfactualClassification (ja)":64.19,"AmazonReviewsClassification (de)":40.25,"AmazonReviewsClassification (es)":40.39,"AmazonReviewsClassification (fr)":39.68,"AmazonReviewsClassification (ja)":37.68,"AmazonReviewsClassification (zh)":37.5,"MTOPDomainClassification (de)":87.47,"MTOPDomainClassification (es)":89.27,"MTOPDomainClassification (fr)":81.2,"MTOPDomainClassification (hi)":86.02,"MTOPDomainClassification (th)":85.35,"MTOPIntentClassification 
(de)":65.86,"MTOPIntentClassification (es)":67.97,"MTOPIntentClassification (fr)":46.01,"MTOPIntentClassification (hi)":66.3,"MTOPIntentClassification (th)":67.52,"MassiveIntentClassification (af)":57.07,"MassiveIntentClassification (am)":51.96,"MassiveIntentClassification (ar)":54.43,"MassiveIntentClassification (az)":59.8,"MassiveIntentClassification (bn)":59.38,"MassiveIntentClassification (cy)":46.56,"MassiveIntentClassification (de)":62.73,"MassiveIntentClassification (el)":61.6,"MassiveIntentClassification (es)":66.31,"MassiveIntentClassification (fa)":65.54,"MassiveIntentClassification (fi)":61.46,"MassiveIntentClassification (fr)":65.47,"MassiveIntentClassification (he)":58.05,"MassiveIntentClassification (hi)":64.07,"MassiveIntentClassification (hu)":60.95,"MassiveIntentClassification (hy)":57.4,"MassiveIntentClassification (id)":64.17,"MassiveIntentClassification (is)":52.26,"MassiveIntentClassification (it)":65.54,"MassiveIntentClassification (ja)":68.23,"MassiveIntentClassification (jv)":50.85,"MassiveIntentClassification (ka)":48.45,"MassiveIntentClassification (km)":42.83,"MassiveIntentClassification (kn)":57.51,"MassiveIntentClassification (ko)":63.79,"MassiveIntentClassification (lv)":54.99,"MassiveIntentClassification (ml)":61.9,"MassiveIntentClassification (mn)":57.1,"MassiveIntentClassification (ms)":58.99,"MassiveIntentClassification (my)":55.9,"MassiveIntentClassification (nl)":65.64,"MassiveIntentClassification (pt)":66.85,"MassiveIntentClassification (ro)":60.81,"MassiveIntentClassification (ru)":65.76,"MassiveIntentClassification (sl)":56.52,"MassiveIntentClassification (sq)":57.99,"MassiveIntentClassification (sw)":53.57,"MassiveIntentClassification (ta)":57.26,"MassiveIntentClassification (te)":57.83,"MassiveIntentClassification (th)":64.07,"MassiveIntentClassification (tl)":58.91,"MassiveIntentClassification (tr)":63.54,"MassiveIntentClassification (ur)":59.28,"MassiveIntentClassification (vi)":64.07,"MassiveIntentClassification (zh-TW)":62.54,"MassiveScenarioClassification (af)":63.04,"MassiveScenarioClassification (am)":56.84,"MassiveScenarioClassification (ar)":59.62,"MassiveScenarioClassification (az)":60.85,"MassiveScenarioClassification (bn)":62.77,"MassiveScenarioClassification (cy)":50.18,"MassiveScenarioClassification (de)":69.19,"MassiveScenarioClassification (el)":67.07,"MassiveScenarioClassification (es)":69.83,"MassiveScenarioClassification (fa)":68.71,"MassiveScenarioClassification (fi)":65.95,"MassiveScenarioClassification (fr)":68.76,"MassiveScenarioClassification (he)":63.81,"MassiveScenarioClassification (hi)":67.69,"MassiveScenarioClassification (hu)":66.47,"MassiveScenarioClassification (hy)":59.5,"MassiveScenarioClassification (id)":67.92,"MassiveScenarioClassification (is)":56.49,"MassiveScenarioClassification (it)":69.04,"MassiveScenarioClassification (ja)":73.89,"MassiveScenarioClassification (jv)":56.63,"MassiveScenarioClassification (ka)":52.24,"MassiveScenarioClassification (km)":46.62,"MassiveScenarioClassification (kn)":59.16,"MassiveScenarioClassification (ko)":69.85,"MassiveScenarioClassification (lv)":56.66,"MassiveScenarioClassification (ml)":66.54,"MassiveScenarioClassification (mn)":59.31,"MassiveScenarioClassification (ms)":64.88,"MassiveScenarioClassification (my)":58.86,"MassiveScenarioClassification (nl)":70.87,"MassiveScenarioClassification (pt)":68.18,"MassiveScenarioClassification (ro)":64.65,"MassiveScenarioClassification (ru)":69.48,"MassiveScenarioClassification (sl)":60.18,"MassiveScenarioClassification 
(sq)":62.86,"MassiveScenarioClassification (sw)":58.15,"MassiveScenarioClassification (ta)":59.44,"MassiveScenarioClassification (te)":60.85,"MassiveScenarioClassification (th)":70.66,"MassiveScenarioClassification (tl)":60.88,"MassiveScenarioClassification (tr)":68.05,"MassiveScenarioClassification (ur)":62.11,"MassiveScenarioClassification (vi)":67.44,"MassiveScenarioClassification (zh-TW)":68.32} -{"index":96,"Rank":7,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.56,"AmazonCounterfactualClassification (de)":73.17,"AmazonCounterfactualClassification (ja)":76.42,"AmazonReviewsClassification (de)":39.92,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":38.52,"AmazonReviewsClassification (ja)":36.44,"AmazonReviewsClassification (zh)":36.45,"MTOPDomainClassification (de)":86.95,"MTOPDomainClassification (es)":84.07,"MTOPDomainClassification (fr)":84.14,"MTOPDomainClassification (hi)":85.11,"MTOPDomainClassification (th)":81.24,"MTOPIntentClassification (de)":63.42,"MTOPIntentClassification (es)":64.44,"MTOPIntentClassification (fr)":62.01,"MTOPIntentClassification (hi)":62.58,"MTOPIntentClassification (th)":64.61,"MassiveIntentClassification (af)":56.12,"MassiveIntentClassification (am)":55.71,"MassiveIntentClassification (ar)":50.86,"MassiveIntentClassification (az)":58.97,"MassiveIntentClassification (bn)":58.22,"MassiveIntentClassification (cy)":50.16,"MassiveIntentClassification (de)":56.21,"MassiveIntentClassification (el)":57.03,"MassiveIntentClassification (es)":58.32,"MassiveIntentClassification (fa)":62.33,"MassiveIntentClassification (fi)":60.12,"MassiveIntentClassification (fr)":60.47,"MassiveIntentClassification (he)":56.55,"MassiveIntentClassification (hi)":59.4,"MassiveIntentClassification (hu)":59.52,"MassiveIntentClassification (hy)":56.2,"MassiveIntentClassification (id)":61.12,"MassiveIntentClassification (is)":54.9,"MassiveIntentClassification (it)":59.83,"MassiveIntentClassification (ja)":63.11,"MassiveIntentClassification (jv)":50.98,"MassiveIntentClassification (ka)":48.35,"MassiveIntentClassification (km)":48.55,"MassiveIntentClassification (kn)":56.24,"MassiveIntentClassification (ko)":60.99,"MassiveIntentClassification (lv)":57.1,"MassiveIntentClassification (ml)":57.91,"MassiveIntentClassification (mn)":58.5,"MassiveIntentClassification (ms)":58.6,"MassiveIntentClassification (my)":57.35,"MassiveIntentClassification (nl)":59.37,"MassiveIntentClassification (pt)":60.16,"MassiveIntentClassification (ro)":57.92,"MassiveIntentClassification (ru)":60.67,"MassiveIntentClassification (sl)":59.37,"MassiveIntentClassification (sq)":58.03,"MassiveIntentClassification (sw)":51.62,"MassiveIntentClassification (ta)":55.04,"MassiveIntentClassification (te)":58.32,"MassiveIntentClassification (th)":56.58,"MassiveIntentClassification (tl)":55.28,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":56.7,"MassiveIntentClassification (vi)":56.67,"MassiveIntentClassification (zh-TW)":59.51,"MassiveScenarioClassification (af)":63.39,"MassiveScenarioClassification (am)":62.02,"MassiveScenarioClassification (ar)":57.72,"MassiveScenarioClassification (az)":63.48,"MassiveScenarioClassification (bn)":61.84,"MassiveScenarioClassification (cy)":56.13,"MassiveScenarioClassification (de)":62.39,"MassiveScenarioClassification (el)":64.58,"MassiveScenarioClassification (es)":63.61,"MassiveScenarioClassification (fa)":67.46,"MassiveScenarioClassification 
(fi)":64.58,"MassiveScenarioClassification (fr)":65.1,"MassiveScenarioClassification (he)":63.53,"MassiveScenarioClassification (hi)":64.4,"MassiveScenarioClassification (hu)":65.82,"MassiveScenarioClassification (hy)":61.25,"MassiveScenarioClassification (id)":65.84,"MassiveScenarioClassification (is)":61.94,"MassiveScenarioClassification (it)":64.09,"MassiveScenarioClassification (ja)":67.72,"MassiveScenarioClassification (jv)":58.29,"MassiveScenarioClassification (ka)":53.38,"MassiveScenarioClassification (km)":56.18,"MassiveScenarioClassification (kn)":61.74,"MassiveScenarioClassification (ko)":67.26,"MassiveScenarioClassification (lv)":61.87,"MassiveScenarioClassification (ml)":62.26,"MassiveScenarioClassification (mn)":62.6,"MassiveScenarioClassification (ms)":65.63,"MassiveScenarioClassification (my)":62.94,"MassiveScenarioClassification (nl)":65.16,"MassiveScenarioClassification (pt)":63.28,"MassiveScenarioClassification (ro)":62.41,"MassiveScenarioClassification (ru)":65.25,"MassiveScenarioClassification (sl)":64.25,"MassiveScenarioClassification (sq)":64.54,"MassiveScenarioClassification (sw)":58.36,"MassiveScenarioClassification (ta)":59.08,"MassiveScenarioClassification (te)":64.13,"MassiveScenarioClassification (th)":64.34,"MassiveScenarioClassification (tl)":60.23,"MassiveScenarioClassification (tr)":65.43,"MassiveScenarioClassification (ur)":61.52,"MassiveScenarioClassification (vi)":61.05,"MassiveScenarioClassification (zh-TW)":67.08} -{"index":46,"Rank":8,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.73,"AmazonCounterfactualClassification (de)":70.94,"AmazonCounterfactualClassification (ja)":80.06,"AmazonReviewsClassification (de)":38.83,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":39.47,"AmazonReviewsClassification (ja)":35.9,"AmazonReviewsClassification (zh)":36.07,"MTOPDomainClassification (de)":86.91,"MTOPDomainClassification (es)":87.74,"MTOPDomainClassification (fr)":86.22,"MTOPDomainClassification (hi)":82.92,"MTOPDomainClassification (th)":69.9,"MTOPIntentClassification (de)":63.26,"MTOPIntentClassification (es)":65.06,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":59.08,"MTOPIntentClassification (th)":43.75,"MassiveIntentClassification (af)":47.6,"MassiveIntentClassification (am)":31.57,"MassiveIntentClassification (ar)":52.63,"MassiveIntentClassification (az)":50.09,"MassiveIntentClassification (bn)":46.54,"MassiveIntentClassification (cy)":44.26,"MassiveIntentClassification (de)":61.87,"MassiveIntentClassification (el)":47.02,"MassiveIntentClassification (es)":62.54,"MassiveIntentClassification (fa)":55.19,"MassiveIntentClassification (fi)":48.43,"MassiveIntentClassification (fr)":64.27,"MassiveIntentClassification (he)":57.62,"MassiveIntentClassification (hi)":57.54,"MassiveIntentClassification (hu)":45.67,"MassiveIntentClassification (hy)":39.2,"MassiveIntentClassification (id)":55.0,"MassiveIntentClassification (is)":43.14,"MassiveIntentClassification (it)":61.0,"MassiveIntentClassification (ja)":64.29,"MassiveIntentClassification (jv)":43.69,"MassiveIntentClassification (ka)":38.35,"MassiveIntentClassification (km)":34.22,"MassiveIntentClassification (kn)":51.79,"MassiveIntentClassification (ko)":59.59,"MassiveIntentClassification (lv)":46.54,"MassiveIntentClassification (ml)":54.47,"MassiveIntentClassification (mn)":40.68,"MassiveIntentClassification (ms)":51.24,"MassiveIntentClassification (my)":31.76,"MassiveIntentClassification 
(nl)":60.82,"MassiveIntentClassification (pt)":62.74,"MassiveIntentClassification (ro)":49.68,"MassiveIntentClassification (ru)":60.85,"MassiveIntentClassification (sl)":48.59,"MassiveIntentClassification (sq)":47.17,"MassiveIntentClassification (sw)":45.97,"MassiveIntentClassification (ta)":53.6,"MassiveIntentClassification (te)":53.45,"MassiveIntentClassification (th)":46.17,"MassiveIntentClassification (tl)":49.48,"MassiveIntentClassification (tr)":58.03,"MassiveIntentClassification (ur)":39.26,"MassiveIntentClassification (vi)":52.16,"MassiveIntentClassification (zh-TW)":58.21,"MassiveScenarioClassification (af)":58.07,"MassiveScenarioClassification (am)":38.21,"MassiveScenarioClassification (ar)":57.47,"MassiveScenarioClassification (az)":54.37,"MassiveScenarioClassification (bn)":52.72,"MassiveScenarioClassification (cy)":49.5,"MassiveScenarioClassification (de)":71.28,"MassiveScenarioClassification (el)":52.42,"MassiveScenarioClassification (es)":67.04,"MassiveScenarioClassification (fa)":60.17,"MassiveScenarioClassification (fi)":54.05,"MassiveScenarioClassification (fr)":69.76,"MassiveScenarioClassification (he)":62.85,"MassiveScenarioClassification (hi)":62.18,"MassiveScenarioClassification (hu)":53.52,"MassiveScenarioClassification (hy)":45.95,"MassiveScenarioClassification (id)":60.33,"MassiveScenarioClassification (is)":50.1,"MassiveScenarioClassification (it)":66.49,"MassiveScenarioClassification (ja)":68.36,"MassiveScenarioClassification (jv)":50.59,"MassiveScenarioClassification (ka)":42.76,"MassiveScenarioClassification (km)":40.65,"MassiveScenarioClassification (kn)":57.25,"MassiveScenarioClassification (ko)":63.84,"MassiveScenarioClassification (lv)":53.14,"MassiveScenarioClassification (ml)":58.84,"MassiveScenarioClassification (mn)":44.82,"MassiveScenarioClassification (ms)":58.9,"MassiveScenarioClassification (my)":38.52,"MassiveScenarioClassification (nl)":67.54,"MassiveScenarioClassification (pt)":65.7,"MassiveScenarioClassification (ro)":57.2,"MassiveScenarioClassification (ru)":65.42,"MassiveScenarioClassification (sl)":55.15,"MassiveScenarioClassification (sq)":55.68,"MassiveScenarioClassification (sw)":52.3,"MassiveScenarioClassification (ta)":56.19,"MassiveScenarioClassification (te)":58.02,"MassiveScenarioClassification (th)":52.56,"MassiveScenarioClassification (tl)":57.43,"MassiveScenarioClassification (tr)":61.55,"MassiveScenarioClassification (ur)":47.11,"MassiveScenarioClassification (vi)":56.83,"MassiveScenarioClassification (zh-TW)":64.02} -{"index":49,"Rank":9,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.35,"AmazonCounterfactualClassification (de)":61.35,"AmazonCounterfactualClassification (ja)":58.23,"AmazonReviewsClassification (de)":29.7,"AmazonReviewsClassification (es)":35.97,"AmazonReviewsClassification (fr)":35.92,"AmazonReviewsClassification (ja)":27.64,"AmazonReviewsClassification (zh)":32.63,"MTOPDomainClassification (de)":82.05,"MTOPDomainClassification (es)":93.55,"MTOPDomainClassification (fr)":90.98,"MTOPDomainClassification (hi)":89.33,"MTOPDomainClassification (th)":60.49,"MTOPIntentClassification (de)":61.92,"MTOPIntentClassification (es)":74.49,"MTOPIntentClassification (fr)":69.12,"MTOPIntentClassification (hi)":64.85,"MTOPIntentClassification (th)":49.36,"MassiveIntentClassification (af)":47.85,"MassiveIntentClassification (am)":33.3,"MassiveIntentClassification (ar)":59.25,"MassiveIntentClassification (az)":45.24,"MassiveIntentClassification 
(bn)":61.59,"MassiveIntentClassification (cy)":44.92,"MassiveIntentClassification (de)":56.1,"MassiveIntentClassification (el)":46.13,"MassiveIntentClassification (es)":66.35,"MassiveIntentClassification (fa)":51.2,"MassiveIntentClassification (fi)":45.33,"MassiveIntentClassification (fr)":66.95,"MassiveIntentClassification (he)":43.18,"MassiveIntentClassification (hi)":63.54,"MassiveIntentClassification (hu)":44.73,"MassiveIntentClassification (hy)":38.13,"MassiveIntentClassification (id)":64.06,"MassiveIntentClassification (is)":44.35,"MassiveIntentClassification (it)":60.77,"MassiveIntentClassification (ja)":61.22,"MassiveIntentClassification (jv)":50.94,"MassiveIntentClassification (ka)":33.84,"MassiveIntentClassification (km)":37.34,"MassiveIntentClassification (kn)":53.54,"MassiveIntentClassification (ko)":53.36,"MassiveIntentClassification (lv)":46.5,"MassiveIntentClassification (ml)":58.27,"MassiveIntentClassification (mn)":40.28,"MassiveIntentClassification (ms)":59.65,"MassiveIntentClassification (my)":37.42,"MassiveIntentClassification (nl)":52.09,"MassiveIntentClassification (pt)":66.69,"MassiveIntentClassification (ro)":50.53,"MassiveIntentClassification (ru)":58.32,"MassiveIntentClassification (sl)":47.74,"MassiveIntentClassification (sq)":48.94,"MassiveIntentClassification (sw)":49.81,"MassiveIntentClassification (ta)":56.4,"MassiveIntentClassification (te)":54.71,"MassiveIntentClassification (th)":44.43,"MassiveIntentClassification (tl)":50.21,"MassiveIntentClassification (tr)":46.56,"MassiveIntentClassification (ur)":56.75,"MassiveIntentClassification (vi)":64.53,"MassiveIntentClassification (zh-TW)":62.89,"MassiveScenarioClassification (af)":51.47,"MassiveScenarioClassification (am)":34.87,"MassiveScenarioClassification (ar)":65.21,"MassiveScenarioClassification (az)":45.58,"MassiveScenarioClassification (bn)":67.3,"MassiveScenarioClassification (cy)":46.29,"MassiveScenarioClassification (de)":61.74,"MassiveScenarioClassification (el)":48.96,"MassiveScenarioClassification (es)":73.34,"MassiveScenarioClassification (fa)":53.17,"MassiveScenarioClassification (fi)":44.69,"MassiveScenarioClassification (fr)":72.91,"MassiveScenarioClassification (he)":43.1,"MassiveScenarioClassification (hi)":69.27,"MassiveScenarioClassification (hu)":45.16,"MassiveScenarioClassification (hy)":38.73,"MassiveScenarioClassification (id)":70.13,"MassiveScenarioClassification (is)":44.21,"MassiveScenarioClassification (it)":65.57,"MassiveScenarioClassification (ja)":65.76,"MassiveScenarioClassification (jv)":54.79,"MassiveScenarioClassification (ka)":32.99,"MassiveScenarioClassification (km)":39.34,"MassiveScenarioClassification (kn)":60.5,"MassiveScenarioClassification (ko)":55.69,"MassiveScenarioClassification (lv)":44.35,"MassiveScenarioClassification (ml)":65.53,"MassiveScenarioClassification (mn)":38.72,"MassiveScenarioClassification (ms)":64.99,"MassiveScenarioClassification (my)":36.84,"MassiveScenarioClassification (nl)":56.32,"MassiveScenarioClassification (pt)":71.46,"MassiveScenarioClassification (ro)":53.69,"MassiveScenarioClassification (ru)":61.6,"MassiveScenarioClassification (sl)":48.04,"MassiveScenarioClassification (sq)":50.06,"MassiveScenarioClassification (sw)":54.22,"MassiveScenarioClassification (ta)":62.77,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":45.18,"MassiveScenarioClassification (tl)":52.06,"MassiveScenarioClassification (tr)":47.21,"MassiveScenarioClassification (ur)":64.26,"MassiveScenarioClassification 
(vi)":70.61,"MassiveScenarioClassification (zh-TW)":70.3} -{"index":113,"Rank":10,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.59,"AmazonCounterfactualClassification (de)":68.69,"AmazonCounterfactualClassification (ja)":61.61,"AmazonReviewsClassification (de)":33.39,"AmazonReviewsClassification (es)":34.82,"AmazonReviewsClassification (fr)":33.45,"AmazonReviewsClassification (ja)":30.05,"AmazonReviewsClassification (zh)":32.52,"MTOPDomainClassification (de)":78.59,"MTOPDomainClassification (es)":79.24,"MTOPDomainClassification (fr)":76.17,"MTOPDomainClassification (hi)":78.75,"MTOPDomainClassification (th)":77.67,"MTOPIntentClassification (de)":55.29,"MTOPIntentClassification (es)":58.68,"MTOPIntentClassification (fr)":53.26,"MTOPIntentClassification (hi)":59.62,"MTOPIntentClassification (th)":58.8,"MassiveIntentClassification (af)":45.42,"MassiveIntentClassification (am)":37.68,"MassiveIntentClassification (ar)":45.02,"MassiveIntentClassification (az)":48.71,"MassiveIntentClassification (bn)":43.79,"MassiveIntentClassification (cy)":28.76,"MassiveIntentClassification (de)":51.56,"MassiveIntentClassification (el)":56.47,"MassiveIntentClassification (es)":58.28,"MassiveIntentClassification (fa)":59.05,"MassiveIntentClassification (fi)":57.36,"MassiveIntentClassification (fr)":58.8,"MassiveIntentClassification (he)":51.18,"MassiveIntentClassification (hi)":57.06,"MassiveIntentClassification (hu)":58.36,"MassiveIntentClassification (hy)":52.11,"MassiveIntentClassification (id)":58.27,"MassiveIntentClassification (is)":35.81,"MassiveIntentClassification (it)":58.28,"MassiveIntentClassification (ja)":60.78,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":44.5,"MassiveIntentClassification (km)":40.99,"MassiveIntentClassification (kn)":46.96,"MassiveIntentClassification (ko)":54.73,"MassiveIntentClassification (lv)":54.87,"MassiveIntentClassification (ml)":47.89,"MassiveIntentClassification (mn)":52.23,"MassiveIntentClassification (ms)":54.28,"MassiveIntentClassification (my)":51.96,"MassiveIntentClassification (nl)":59.45,"MassiveIntentClassification (pt)":59.84,"MassiveIntentClassification (ro)":57.04,"MassiveIntentClassification (ru)":58.02,"MassiveIntentClassification (sl)":56.36,"MassiveIntentClassification (sq)":56.48,"MassiveIntentClassification (sw)":33.96,"MassiveIntentClassification (ta)":44.29,"MassiveIntentClassification (te)":47.14,"MassiveIntentClassification (th)":56.86,"MassiveIntentClassification (tl)":35.36,"MassiveIntentClassification (tr)":59.63,"MassiveIntentClassification (ur)":52.79,"MassiveIntentClassification (vi)":54.65,"MassiveIntentClassification (zh-TW)":57.47,"MassiveScenarioClassification (af)":50.86,"MassiveScenarioClassification (am)":41.18,"MassiveScenarioClassification (ar)":50.08,"MassiveScenarioClassification (az)":51.29,"MassiveScenarioClassification (bn)":46.53,"MassiveScenarioClassification (cy)":34.35,"MassiveScenarioClassification (de)":56.4,"MassiveScenarioClassification (el)":61.8,"MassiveScenarioClassification (es)":62.21,"MassiveScenarioClassification (fa)":62.44,"MassiveScenarioClassification (fi)":61.1,"MassiveScenarioClassification (fr)":63.39,"MassiveScenarioClassification (he)":56.29,"MassiveScenarioClassification (hi)":60.63,"MassiveScenarioClassification (hu)":63.29,"MassiveScenarioClassification (hy)":54.88,"MassiveScenarioClassification (id)":61.99,"MassiveScenarioClassification (is)":38.58,"MassiveScenarioClassification 
(it)":62.35,"MassiveScenarioClassification (ja)":65.17,"MassiveScenarioClassification (jv)":36.13,"MassiveScenarioClassification (ka)":50.27,"MassiveScenarioClassification (km)":44.24,"MassiveScenarioClassification (kn)":47.37,"MassiveScenarioClassification (ko)":58.89,"MassiveScenarioClassification (lv)":56.51,"MassiveScenarioClassification (ml)":50.06,"MassiveScenarioClassification (mn)":55.05,"MassiveScenarioClassification (ms)":59.77,"MassiveScenarioClassification (my)":55.72,"MassiveScenarioClassification (nl)":63.38,"MassiveScenarioClassification (pt)":62.41,"MassiveScenarioClassification (ro)":60.68,"MassiveScenarioClassification (ru)":62.31,"MassiveScenarioClassification (sl)":61.43,"MassiveScenarioClassification (sq)":62.23,"MassiveScenarioClassification (sw)":38.52,"MassiveScenarioClassification (ta)":47.0,"MassiveScenarioClassification (te)":51.02,"MassiveScenarioClassification (th)":63.23,"MassiveScenarioClassification (tl)":38.72,"MassiveScenarioClassification (tr)":64.49,"MassiveScenarioClassification (ur)":56.8,"MassiveScenarioClassification (vi)":57.06,"MassiveScenarioClassification (zh-TW)":63.37} -{"index":72,"Rank":11,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.6,"AmazonCounterfactualClassification (de)":66.03,"AmazonCounterfactualClassification (ja)":58.77,"AmazonReviewsClassification (de)":30.45,"AmazonReviewsClassification (es)":40.8,"AmazonReviewsClassification (fr)":35.12,"AmazonReviewsClassification (ja)":32.07,"AmazonReviewsClassification (zh)":38.12,"MTOPDomainClassification (de)":74.64,"MTOPDomainClassification (es)":92.07,"MTOPDomainClassification (fr)":69.24,"MTOPDomainClassification (hi)":88.54,"MTOPDomainClassification (th)":55.63,"MTOPIntentClassification (de)":53.4,"MTOPIntentClassification (es)":71.33,"MTOPIntentClassification (fr)":51.25,"MTOPIntentClassification (hi)":66.73,"MTOPIntentClassification (th)":42.13,"MassiveIntentClassification (af)":44.98,"MassiveIntentClassification (am)":25.35,"MassiveIntentClassification (ar)":57.08,"MassiveIntentClassification (az)":39.11,"MassiveIntentClassification (bn)":61.37,"MassiveIntentClassification (cy)":42.97,"MassiveIntentClassification (de)":50.93,"MassiveIntentClassification (el)":40.09,"MassiveIntentClassification (es)":63.81,"MassiveIntentClassification (fa)":49.06,"MassiveIntentClassification (fi)":42.58,"MassiveIntentClassification (fr)":43.21,"MassiveIntentClassification (he)":37.15,"MassiveIntentClassification (hi)":62.89,"MassiveIntentClassification (hu)":41.62,"MassiveIntentClassification (hy)":32.98,"MassiveIntentClassification (id)":62.11,"MassiveIntentClassification (is)":41.04,"MassiveIntentClassification (it)":55.83,"MassiveIntentClassification (ja)":58.83,"MassiveIntentClassification (jv)":49.31,"MassiveIntentClassification (ka)":26.43,"MassiveIntentClassification (km)":28.77,"MassiveIntentClassification (kn)":52.33,"MassiveIntentClassification (ko)":46.93,"MassiveIntentClassification (lv)":44.26,"MassiveIntentClassification (ml)":57.75,"MassiveIntentClassification (mn)":33.31,"MassiveIntentClassification (ms)":55.7,"MassiveIntentClassification (my)":27.39,"MassiveIntentClassification (nl)":48.34,"MassiveIntentClassification (pt)":64.74,"MassiveIntentClassification (ro)":48.41,"MassiveIntentClassification (ru)":52.99,"MassiveIntentClassification (sl)":44.77,"MassiveIntentClassification (sq)":45.45,"MassiveIntentClassification (sw)":46.46,"MassiveIntentClassification (ta)":55.46,"MassiveIntentClassification 
(te)":51.41,"MassiveIntentClassification (th)":39.2,"MassiveIntentClassification (tl)":48.53,"MassiveIntentClassification (tr)":39.51,"MassiveIntentClassification (ur)":54.72,"MassiveIntentClassification (vi)":62.01,"MassiveIntentClassification (zh-TW)":62.56,"MassiveScenarioClassification (af)":50.47,"MassiveScenarioClassification (am)":27.22,"MassiveScenarioClassification (ar)":65.43,"MassiveScenarioClassification (az)":40.74,"MassiveScenarioClassification (bn)":67.65,"MassiveScenarioClassification (cy)":43.94,"MassiveScenarioClassification (de)":56.67,"MassiveScenarioClassification (el)":41.81,"MassiveScenarioClassification (es)":71.78,"MassiveScenarioClassification (fa)":49.96,"MassiveScenarioClassification (fi)":41.01,"MassiveScenarioClassification (fr)":49.78,"MassiveScenarioClassification (he)":36.69,"MassiveScenarioClassification (hi)":69.28,"MassiveScenarioClassification (hu)":44.31,"MassiveScenarioClassification (hy)":33.64,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":42.1,"MassiveScenarioClassification (it)":60.27,"MassiveScenarioClassification (ja)":62.48,"MassiveScenarioClassification (jv)":54.68,"MassiveScenarioClassification (ka)":27.22,"MassiveScenarioClassification (km)":32.14,"MassiveScenarioClassification (kn)":57.95,"MassiveScenarioClassification (ko)":47.95,"MassiveScenarioClassification (lv)":42.76,"MassiveScenarioClassification (ml)":62.84,"MassiveScenarioClassification (mn)":33.21,"MassiveScenarioClassification (ms)":62.57,"MassiveScenarioClassification (my)":28.84,"MassiveScenarioClassification (nl)":52.85,"MassiveScenarioClassification (pt)":70.24,"MassiveScenarioClassification (ro)":52.73,"MassiveScenarioClassification (ru)":54.26,"MassiveScenarioClassification (sl)":46.89,"MassiveScenarioClassification (sq)":47.16,"MassiveScenarioClassification (sw)":51.2,"MassiveScenarioClassification (ta)":61.84,"MassiveScenarioClassification (te)":59.79,"MassiveScenarioClassification (th)":41.62,"MassiveScenarioClassification (tl)":50.47,"MassiveScenarioClassification (tr)":43.41,"MassiveScenarioClassification (ur)":60.15,"MassiveScenarioClassification (vi)":68.99,"MassiveScenarioClassification (zh-TW)":71.7} -{"index":73,"Rank":12,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.37,"AmazonCounterfactualClassification (de)":66.42,"AmazonCounterfactualClassification (ja)":56.86,"AmazonReviewsClassification (de)":26.85,"AmazonReviewsClassification (es)":38.97,"AmazonReviewsClassification (fr)":26.85,"AmazonReviewsClassification (ja)":28.31,"AmazonReviewsClassification (zh)":35.7,"MTOPDomainClassification (de)":68.42,"MTOPDomainClassification (es)":88.21,"MTOPDomainClassification (fr)":34.99,"MTOPDomainClassification (hi)":84.23,"MTOPDomainClassification (th)":53.17,"MTOPIntentClassification (de)":49.17,"MTOPIntentClassification (es)":65.72,"MTOPIntentClassification (fr)":15.76,"MTOPIntentClassification (hi)":61.88,"MTOPIntentClassification (th)":41.67,"MassiveIntentClassification (af)":43.29,"MassiveIntentClassification (am)":23.21,"MassiveIntentClassification (ar)":53.38,"MassiveIntentClassification (az)":39.56,"MassiveIntentClassification (bn)":56.74,"MassiveIntentClassification (cy)":40.0,"MassiveIntentClassification (de)":45.82,"MassiveIntentClassification (el)":37.87,"MassiveIntentClassification (es)":61.17,"MassiveIntentClassification (fa)":45.65,"MassiveIntentClassification (fi)":40.28,"MassiveIntentClassification (fr)":15.09,"MassiveIntentClassification 
(he)":32.23,"MassiveIntentClassification (hi)":59.46,"MassiveIntentClassification (hu)":40.91,"MassiveIntentClassification (hy)":29.94,"MassiveIntentClassification (id)":59.14,"MassiveIntentClassification (is)":39.62,"MassiveIntentClassification (it)":51.77,"MassiveIntentClassification (ja)":53.75,"MassiveIntentClassification (jv)":46.29,"MassiveIntentClassification (ka)":25.11,"MassiveIntentClassification (km)":27.22,"MassiveIntentClassification (kn)":47.97,"MassiveIntentClassification (ko)":40.54,"MassiveIntentClassification (lv)":43.14,"MassiveIntentClassification (ml)":53.69,"MassiveIntentClassification (mn)":33.37,"MassiveIntentClassification (ms)":51.94,"MassiveIntentClassification (my)":25.32,"MassiveIntentClassification (nl)":44.03,"MassiveIntentClassification (pt)":61.74,"MassiveIntentClassification (ro)":45.73,"MassiveIntentClassification (ru)":47.61,"MassiveIntentClassification (sl)":42.83,"MassiveIntentClassification (sq)":43.61,"MassiveIntentClassification (sw)":45.55,"MassiveIntentClassification (ta)":51.24,"MassiveIntentClassification (te)":47.43,"MassiveIntentClassification (th)":36.88,"MassiveIntentClassification (tl)":45.93,"MassiveIntentClassification (tr)":38.59,"MassiveIntentClassification (ur)":51.85,"MassiveIntentClassification (vi)":58.72,"MassiveIntentClassification (zh-TW)":59.95,"MassiveScenarioClassification (af)":47.42,"MassiveScenarioClassification (am)":24.71,"MassiveScenarioClassification (ar)":62.09,"MassiveScenarioClassification (az)":39.25,"MassiveScenarioClassification (bn)":63.37,"MassiveScenarioClassification (cy)":39.17,"MassiveScenarioClassification (de)":50.71,"MassiveScenarioClassification (el)":39.47,"MassiveScenarioClassification (es)":68.31,"MassiveScenarioClassification (fa)":45.65,"MassiveScenarioClassification (fi)":38.95,"MassiveScenarioClassification (fr)":21.67,"MassiveScenarioClassification (he)":32.13,"MassiveScenarioClassification (hi)":65.57,"MassiveScenarioClassification (hu)":42.97,"MassiveScenarioClassification (hy)":32.13,"MassiveScenarioClassification (id)":65.11,"MassiveScenarioClassification (is)":40.84,"MassiveScenarioClassification (it)":54.55,"MassiveScenarioClassification (ja)":57.15,"MassiveScenarioClassification (jv)":49.3,"MassiveScenarioClassification (ka)":25.86,"MassiveScenarioClassification (km)":31.18,"MassiveScenarioClassification (kn)":53.01,"MassiveScenarioClassification (ko)":40.25,"MassiveScenarioClassification (lv)":41.88,"MassiveScenarioClassification (ml)":59.08,"MassiveScenarioClassification (mn)":33.34,"MassiveScenarioClassification (ms)":57.45,"MassiveScenarioClassification (my)":27.2,"MassiveScenarioClassification (nl)":48.42,"MassiveScenarioClassification (pt)":66.41,"MassiveScenarioClassification (ro)":50.08,"MassiveScenarioClassification (ru)":49.94,"MassiveScenarioClassification (sl)":43.43,"MassiveScenarioClassification (sq)":44.08,"MassiveScenarioClassification (sw)":49.53,"MassiveScenarioClassification (ta)":56.79,"MassiveScenarioClassification (te)":54.01,"MassiveScenarioClassification (th)":38.58,"MassiveScenarioClassification (tl)":48.07,"MassiveScenarioClassification (tr)":40.65,"MassiveScenarioClassification (ur)":57.75,"MassiveScenarioClassification (vi)":65.83,"MassiveScenarioClassification (zh-TW)":69.64} -{"index":99,"Rank":13,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":44.78,"AmazonCounterfactualClassification (de)":68.14,"AmazonCounterfactualClassification (ja)":65.39,"AmazonReviewsClassification 
(de)":35.03,"AmazonReviewsClassification (es)":36.24,"AmazonReviewsClassification (fr)":35.7,"AmazonReviewsClassification (ja)":31.08,"AmazonReviewsClassification (zh)":33.89,"MTOPDomainClassification (de)":86.19,"MTOPDomainClassification (es)":87.75,"MTOPDomainClassification (fr)":84.61,"MTOPDomainClassification (hi)":76.41,"MTOPDomainClassification (th)":73.62,"MTOPIntentClassification (de)":59.21,"MTOPIntentClassification (es)":57.21,"MTOPIntentClassification (fr)":53.41,"MTOPIntentClassification (hi)":45.54,"MTOPIntentClassification (th)":47.73,"MassiveIntentClassification (af)":40.02,"MassiveIntentClassification (am)":2.35,"MassiveIntentClassification (ar)":43.14,"MassiveIntentClassification (az)":25.6,"MassiveIntentClassification (bn)":4.84,"MassiveIntentClassification (cy)":15.43,"MassiveIntentClassification (de)":51.57,"MassiveIntentClassification (el)":49.65,"MassiveIntentClassification (es)":56.57,"MassiveIntentClassification (fa)":55.36,"MassiveIntentClassification (fi)":45.72,"MassiveIntentClassification (fr)":57.02,"MassiveIntentClassification (he)":46.74,"MassiveIntentClassification (hi)":48.55,"MassiveIntentClassification (hu)":50.65,"MassiveIntentClassification (hy)":40.79,"MassiveIntentClassification (id)":56.0,"MassiveIntentClassification (is)":16.08,"MassiveIntentClassification (it)":57.65,"MassiveIntentClassification (ja)":55.33,"MassiveIntentClassification (jv)":28.16,"MassiveIntentClassification (ka)":29.41,"MassiveIntentClassification (km)":4.79,"MassiveIntentClassification (kn)":3.37,"MassiveIntentClassification (ko)":49.97,"MassiveIntentClassification (lv)":44.31,"MassiveIntentClassification (ml)":3.24,"MassiveIntentClassification (mn)":40.37,"MassiveIntentClassification (ms)":47.97,"MassiveIntentClassification (my)":38.48,"MassiveIntentClassification (nl)":58.29,"MassiveIntentClassification (pt)":58.63,"MassiveIntentClassification (ro)":50.63,"MassiveIntentClassification (ru)":57.96,"MassiveIntentClassification (sl)":50.66,"MassiveIntentClassification (sq)":50.25,"MassiveIntentClassification (sw)":19.29,"MassiveIntentClassification (ta)":3.79,"MassiveIntentClassification (te)":3.36,"MassiveIntentClassification (th)":45.28,"MassiveIntentClassification (tl)":28.44,"MassiveIntentClassification (tr)":50.47,"MassiveIntentClassification (ur)":46.03,"MassiveIntentClassification (vi)":45.25,"MassiveIntentClassification (zh-TW)":54.96,"MassiveScenarioClassification (af)":53.67,"MassiveScenarioClassification (am)":7.72,"MassiveScenarioClassification (ar)":52.19,"MassiveScenarioClassification (az)":34.75,"MassiveScenarioClassification (bn)":10.65,"MassiveScenarioClassification (cy)":21.24,"MassiveScenarioClassification (de)":61.4,"MassiveScenarioClassification (el)":60.68,"MassiveScenarioClassification (es)":64.61,"MassiveScenarioClassification (fa)":59.24,"MassiveScenarioClassification (fi)":54.66,"MassiveScenarioClassification (fr)":65.2,"MassiveScenarioClassification (he)":54.74,"MassiveScenarioClassification (hi)":55.99,"MassiveScenarioClassification (hu)":61.2,"MassiveScenarioClassification (hy)":49.63,"MassiveScenarioClassification (id)":65.25,"MassiveScenarioClassification (is)":22.6,"MassiveScenarioClassification (it)":64.63,"MassiveScenarioClassification (ja)":62.32,"MassiveScenarioClassification (jv)":35.77,"MassiveScenarioClassification (ka)":39.08,"MassiveScenarioClassification (km)":9.24,"MassiveScenarioClassification (kn)":8.28,"MassiveScenarioClassification (ko)":57.6,"MassiveScenarioClassification (lv)":51.72,"MassiveScenarioClassification 
(ml)":8.25,"MassiveScenarioClassification (mn)":47.21,"MassiveScenarioClassification (ms)":55.65,"MassiveScenarioClassification (my)":43.31,"MassiveScenarioClassification (nl)":67.49,"MassiveScenarioClassification (pt)":64.26,"MassiveScenarioClassification (ro)":58.03,"MassiveScenarioClassification (ru)":65.41,"MassiveScenarioClassification (sl)":59.36,"MassiveScenarioClassification (sq)":62.69,"MassiveScenarioClassification (sw)":25.12,"MassiveScenarioClassification (ta)":8.67,"MassiveScenarioClassification (te)":7.82,"MassiveScenarioClassification (th)":54.65,"MassiveScenarioClassification (tl)":36.09,"MassiveScenarioClassification (tr)":60.89,"MassiveScenarioClassification (ur)":54.71,"MassiveScenarioClassification (vi)":55.15,"MassiveScenarioClassification (zh-TW)":62.89} -{"index":5,"Rank":14,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":42.45,"AmazonCounterfactualClassification (de)":67.82,"AmazonCounterfactualClassification (ja)":68.76,"AmazonReviewsClassification (de)":31.07,"AmazonReviewsClassification (es)":32.72,"AmazonReviewsClassification (fr)":31.12,"AmazonReviewsClassification (ja)":28.94,"AmazonReviewsClassification (zh)":30.89,"MTOPDomainClassification (de)":74.08,"MTOPDomainClassification (es)":73.47,"MTOPDomainClassification (fr)":72.26,"MTOPDomainClassification (hi)":72.95,"MTOPDomainClassification (th)":72.68,"MTOPIntentClassification (de)":51.62,"MTOPIntentClassification (es)":52.75,"MTOPIntentClassification (fr)":50.12,"MTOPIntentClassification (hi)":45.55,"MTOPIntentClassification (th)":50.07,"MassiveIntentClassification (af)":38.01,"MassiveIntentClassification (am)":12.7,"MassiveIntentClassification (ar)":37.16,"MassiveIntentClassification (az)":19.98,"MassiveIntentClassification (bn)":42.51,"MassiveIntentClassification (cy)":17.33,"MassiveIntentClassification (de)":44.79,"MassiveIntentClassification (el)":46.71,"MassiveIntentClassification (es)":45.44,"MassiveIntentClassification (fa)":45.01,"MassiveIntentClassification (fi)":45.94,"MassiveIntentClassification (fr)":46.13,"MassiveIntentClassification (he)":42.55,"MassiveIntentClassification (hi)":40.2,"MassiveIntentClassification (hu)":42.77,"MassiveIntentClassification (hy)":28.07,"MassiveIntentClassification (id)":45.81,"MassiveIntentClassification (is)":39.86,"MassiveIntentClassification (it)":48.25,"MassiveIntentClassification (ja)":45.3,"MassiveIntentClassification (jv)":24.3,"MassiveIntentClassification (ka)":22.7,"MassiveIntentClassification (km)":22.48,"MassiveIntentClassification (kn)":4.32,"MassiveIntentClassification (ko)":44.26,"MassiveIntentClassification (lv)":39.75,"MassiveIntentClassification (ml)":41.33,"MassiveIntentClassification (mn)":16.2,"MassiveIntentClassification (ms)":43.23,"MassiveIntentClassification (my)":25.37,"MassiveIntentClassification (nl)":45.0,"MassiveIntentClassification (pt)":48.55,"MassiveIntentClassification (ro)":44.3,"MassiveIntentClassification (ru)":44.29,"MassiveIntentClassification (sl)":44.72,"MassiveIntentClassification (sq)":46.12,"MassiveIntentClassification (sw)":31.89,"MassiveIntentClassification (ta)":29.63,"MassiveIntentClassification (te)":36.03,"MassiveIntentClassification (th)":43.39,"MassiveIntentClassification (tl)":29.73,"MassiveIntentClassification (tr)":43.93,"MassiveIntentClassification (ur)":26.11,"MassiveIntentClassification (vi)":44.33,"MassiveIntentClassification (zh-TW)":32.93,"MassiveScenarioClassification (af)":47.1,"MassiveScenarioClassification (am)":17.7,"MassiveScenarioClassification 
(ar)":45.21,"MassiveScenarioClassification (az)":28.21,"MassiveScenarioClassification (bn)":50.52,"MassiveScenarioClassification (cy)":22.58,"MassiveScenarioClassification (de)":54.34,"MassiveScenarioClassification (el)":55.47,"MassiveScenarioClassification (es)":52.77,"MassiveScenarioClassification (fa)":52.5,"MassiveScenarioClassification (fi)":52.63,"MassiveScenarioClassification (fr)":54.32,"MassiveScenarioClassification (he)":52.41,"MassiveScenarioClassification (hi)":47.37,"MassiveScenarioClassification (hu)":53.43,"MassiveScenarioClassification (hy)":33.57,"MassiveScenarioClassification (id)":54.38,"MassiveScenarioClassification (is)":49.78,"MassiveScenarioClassification (it)":54.84,"MassiveScenarioClassification (ja)":54.12,"MassiveScenarioClassification (jv)":32.71,"MassiveScenarioClassification (ka)":26.92,"MassiveScenarioClassification (km)":27.23,"MassiveScenarioClassification (kn)":10.06,"MassiveScenarioClassification (ko)":52.01,"MassiveScenarioClassification (lv)":44.82,"MassiveScenarioClassification (ml)":49.1,"MassiveScenarioClassification (mn)":21.51,"MassiveScenarioClassification (ms)":53.6,"MassiveScenarioClassification (my)":29.72,"MassiveScenarioClassification (nl)":53.33,"MassiveScenarioClassification (pt)":53.41,"MassiveScenarioClassification (ro)":50.48,"MassiveScenarioClassification (ru)":51.84,"MassiveScenarioClassification (sl)":51.29,"MassiveScenarioClassification (sq)":55.65,"MassiveScenarioClassification (sw)":42.04,"MassiveScenarioClassification (ta)":36.72,"MassiveScenarioClassification (te)":42.08,"MassiveScenarioClassification (th)":52.15,"MassiveScenarioClassification (tl)":37.34,"MassiveScenarioClassification (tr)":52.56,"MassiveScenarioClassification (ur)":32.6,"MassiveScenarioClassification (vi)":50.97,"MassiveScenarioClassification (zh-TW)":42.32} -{"index":39,"Rank":15,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.64,"AmazonCounterfactualClassification (de)":56.88,"AmazonCounterfactualClassification (ja)":54.65,"AmazonReviewsClassification (de)":24.79,"AmazonReviewsClassification (es)":26.64,"AmazonReviewsClassification (fr)":26.39,"AmazonReviewsClassification (ja)":22.08,"AmazonReviewsClassification (zh)":24.27,"MTOPDomainClassification (de)":62.73,"MTOPDomainClassification (es)":67.55,"MTOPDomainClassification (fr)":65.35,"MTOPDomainClassification (hi)":45.37,"MTOPDomainClassification (th)":55.28,"MTOPIntentClassification (de)":49.56,"MTOPIntentClassification (es)":49.94,"MTOPIntentClassification (fr)":46.33,"MTOPIntentClassification (hi)":32.21,"MTOPIntentClassification (th)":43.63,"MassiveIntentClassification (af)":40.55,"MassiveIntentClassification (am)":24.18,"MassiveIntentClassification (ar)":30.13,"MassiveIntentClassification (az)":35.88,"MassiveIntentClassification (bn)":29.17,"MassiveIntentClassification (cy)":41.79,"MassiveIntentClassification (de)":42.07,"MassiveIntentClassification (el)":36.25,"MassiveIntentClassification (es)":42.68,"MassiveIntentClassification (fa)":35.59,"MassiveIntentClassification (fi)":40.04,"MassiveIntentClassification (fr)":43.44,"MassiveIntentClassification (he)":31.59,"MassiveIntentClassification (hi)":27.04,"MassiveIntentClassification (hu)":38.45,"MassiveIntentClassification (hy)":27.98,"MassiveIntentClassification (id)":43.97,"MassiveIntentClassification (is)":40.3,"MassiveIntentClassification (it)":45.47,"MassiveIntentClassification (ja)":45.61,"MassiveIntentClassification (jv)":38.67,"MassiveIntentClassification 
(ka)":25.65,"MassiveIntentClassification (km)":28.3,"MassiveIntentClassification (kn)":23.48,"MassiveIntentClassification (ko)":36.56,"MassiveIntentClassification (lv)":41.85,"MassiveIntentClassification (ml)":24.91,"MassiveIntentClassification (mn)":29.86,"MassiveIntentClassification (ms)":42.42,"MassiveIntentClassification (my)":25.13,"MassiveIntentClassification (nl)":43.62,"MassiveIntentClassification (pt)":45.21,"MassiveIntentClassification (ro)":41.81,"MassiveIntentClassification (ru)":35.97,"MassiveIntentClassification (sl)":40.61,"MassiveIntentClassification (sq)":42.76,"MassiveIntentClassification (sw)":41.12,"MassiveIntentClassification (ta)":24.6,"MassiveIntentClassification (te)":25.04,"MassiveIntentClassification (th)":35.4,"MassiveIntentClassification (tl)":41.19,"MassiveIntentClassification (tr)":36.41,"MassiveIntentClassification (ur)":25.93,"MassiveIntentClassification (vi)":38.8,"MassiveIntentClassification (zh-TW)":42.31,"MassiveScenarioClassification (af)":43.25,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":32.07,"MassiveScenarioClassification (az)":36.68,"MassiveScenarioClassification (bn)":29.57,"MassiveScenarioClassification (cy)":42.1,"MassiveScenarioClassification (de)":43.21,"MassiveScenarioClassification (el)":36.5,"MassiveScenarioClassification (es)":44.08,"MassiveScenarioClassification (fa)":32.61,"MassiveScenarioClassification (fi)":40.36,"MassiveScenarioClassification (fr)":45.07,"MassiveScenarioClassification (he)":32.18,"MassiveScenarioClassification (hi)":26.9,"MassiveScenarioClassification (hu)":40.38,"MassiveScenarioClassification (hy)":28.38,"MassiveScenarioClassification (id)":44.36,"MassiveScenarioClassification (is)":39.29,"MassiveScenarioClassification (it)":46.47,"MassiveScenarioClassification (ja)":46.26,"MassiveScenarioClassification (jv)":41.13,"MassiveScenarioClassification (ka)":24.73,"MassiveScenarioClassification (km)":29.74,"MassiveScenarioClassification (kn)":23.85,"MassiveScenarioClassification (ko)":36.57,"MassiveScenarioClassification (lv)":40.93,"MassiveScenarioClassification (ml)":25.53,"MassiveScenarioClassification (mn)":29.11,"MassiveScenarioClassification (ms)":43.79,"MassiveScenarioClassification (my)":27.27,"MassiveScenarioClassification (nl)":45.36,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":44.12,"MassiveScenarioClassification (ru)":32.76,"MassiveScenarioClassification (sl)":40.5,"MassiveScenarioClassification (sq)":42.52,"MassiveScenarioClassification (sw)":43.0,"MassiveScenarioClassification (ta)":28.33,"MassiveScenarioClassification (te)":26.59,"MassiveScenarioClassification (th)":36.79,"MassiveScenarioClassification (tl)":42.57,"MassiveScenarioClassification (tr)":37.09,"MassiveScenarioClassification (ur)":28.84,"MassiveScenarioClassification (vi)":37.36,"MassiveScenarioClassification (zh-TW)":44.42} -{"index":40,"Rank":16,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.66,"AmazonCounterfactualClassification (de)":59.08,"AmazonCounterfactualClassification (ja)":56.42,"AmazonReviewsClassification (de)":24.52,"AmazonReviewsClassification (es)":29.1,"AmazonReviewsClassification (fr)":27.4,"AmazonReviewsClassification (ja)":21.72,"AmazonReviewsClassification (zh)":23.98,"MTOPDomainClassification (de)":60.37,"MTOPDomainClassification (es)":67.37,"MTOPDomainClassification (fr)":63.13,"MTOPDomainClassification (hi)":47.05,"MTOPDomainClassification (th)":52.28,"MTOPIntentClassification 
(de)":45.07,"MTOPIntentClassification (es)":48.81,"MTOPIntentClassification (fr)":44.34,"MTOPIntentClassification (hi)":34.2,"MTOPIntentClassification (th)":43.11,"MassiveIntentClassification (af)":37.79,"MassiveIntentClassification (am)":23.72,"MassiveIntentClassification (ar)":29.64,"MassiveIntentClassification (az)":39.48,"MassiveIntentClassification (bn)":26.55,"MassiveIntentClassification (cy)":38.78,"MassiveIntentClassification (de)":40.39,"MassiveIntentClassification (el)":37.29,"MassiveIntentClassification (es)":41.18,"MassiveIntentClassification (fa)":36.42,"MassiveIntentClassification (fi)":38.76,"MassiveIntentClassification (fr)":43.67,"MassiveIntentClassification (he)":31.98,"MassiveIntentClassification (hi)":28.04,"MassiveIntentClassification (hu)":38.14,"MassiveIntentClassification (hy)":26.05,"MassiveIntentClassification (id)":41.16,"MassiveIntentClassification (is)":38.63,"MassiveIntentClassification (it)":44.04,"MassiveIntentClassification (ja)":46.21,"MassiveIntentClassification (jv)":37.61,"MassiveIntentClassification (ka)":24.47,"MassiveIntentClassification (km)":26.24,"MassiveIntentClassification (kn)":17.83,"MassiveIntentClassification (ko)":37.27,"MassiveIntentClassification (lv)":40.93,"MassiveIntentClassification (ml)":17.89,"MassiveIntentClassification (mn)":32.98,"MassiveIntentClassification (ms)":40.91,"MassiveIntentClassification (my)":17.83,"MassiveIntentClassification (nl)":41.76,"MassiveIntentClassification (pt)":44.54,"MassiveIntentClassification (ro)":39.97,"MassiveIntentClassification (ru)":37.46,"MassiveIntentClassification (sl)":38.29,"MassiveIntentClassification (sq)":40.95,"MassiveIntentClassification (sw)":38.33,"MassiveIntentClassification (ta)":19.03,"MassiveIntentClassification (te)":19.38,"MassiveIntentClassification (th)":34.09,"MassiveIntentClassification (tl)":40.29,"MassiveIntentClassification (tr)":38.86,"MassiveIntentClassification (ur)":27.83,"MassiveIntentClassification (vi)":38.71,"MassiveIntentClassification (zh-TW)":42.32,"MassiveScenarioClassification (af)":40.25,"MassiveScenarioClassification (am)":25.69,"MassiveScenarioClassification (ar)":32.4,"MassiveScenarioClassification (az)":40.53,"MassiveScenarioClassification (bn)":27.23,"MassiveScenarioClassification (cy)":38.7,"MassiveScenarioClassification (de)":41.36,"MassiveScenarioClassification (el)":38.44,"MassiveScenarioClassification (es)":44.18,"MassiveScenarioClassification (fa)":34.83,"MassiveScenarioClassification (fi)":40.56,"MassiveScenarioClassification (fr)":45.92,"MassiveScenarioClassification (he)":32.08,"MassiveScenarioClassification (hi)":28.37,"MassiveScenarioClassification (hu)":39.49,"MassiveScenarioClassification (hy)":25.9,"MassiveScenarioClassification (id)":40.96,"MassiveScenarioClassification (is)":38.56,"MassiveScenarioClassification (it)":46.59,"MassiveScenarioClassification (ja)":46.25,"MassiveScenarioClassification (jv)":39.66,"MassiveScenarioClassification (ka)":25.28,"MassiveScenarioClassification (km)":28.97,"MassiveScenarioClassification (kn)":19.27,"MassiveScenarioClassification (ko)":35.73,"MassiveScenarioClassification (lv)":39.57,"MassiveScenarioClassification (ml)":19.9,"MassiveScenarioClassification (mn)":32.43,"MassiveScenarioClassification (ms)":42.32,"MassiveScenarioClassification (my)":20.86,"MassiveScenarioClassification (nl)":43.59,"MassiveScenarioClassification (pt)":46.31,"MassiveScenarioClassification (ro)":42.53,"MassiveScenarioClassification (ru)":35.95,"MassiveScenarioClassification (sl)":38.69,"MassiveScenarioClassification 
(sq)":40.47,"MassiveScenarioClassification (sw)":39.55,"MassiveScenarioClassification (ta)":22.88,"MassiveScenarioClassification (te)":20.51,"MassiveScenarioClassification (th)":34.93,"MassiveScenarioClassification (tl)":40.75,"MassiveScenarioClassification (tr)":39.07,"MassiveScenarioClassification (ur)":29.75,"MassiveScenarioClassification (vi)":38.02,"MassiveScenarioClassification (zh-TW)":45.18} -{"index":97,"Rank":17,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":31.93,"AmazonCounterfactualClassification (de)":57.1,"AmazonCounterfactualClassification (ja)":59.91,"AmazonReviewsClassification (de)":25.91,"AmazonReviewsClassification (es)":27.63,"AmazonReviewsClassification (fr)":27.54,"AmazonReviewsClassification (ja)":23.57,"AmazonReviewsClassification (zh)":22.99,"MTOPDomainClassification (de)":72.04,"MTOPDomainClassification (es)":72.99,"MTOPDomainClassification (fr)":75.59,"MTOPDomainClassification (hi)":40.36,"MTOPDomainClassification (th)":17.1,"MTOPIntentClassification (de)":43.41,"MTOPIntentClassification (es)":41.88,"MTOPIntentClassification (fr)":38.94,"MTOPIntentClassification (hi)":17.75,"MTOPIntentClassification (th)":5.63,"MassiveIntentClassification (af)":38.94,"MassiveIntentClassification (am)":2.45,"MassiveIntentClassification (ar)":20.94,"MassiveIntentClassification (az)":34.25,"MassiveIntentClassification (bn)":13.67,"MassiveIntentClassification (cy)":35.71,"MassiveIntentClassification (de)":44.17,"MassiveIntentClassification (el)":28.7,"MassiveIntentClassification (es)":40.91,"MassiveIntentClassification (fa)":23.52,"MassiveIntentClassification (fi)":39.27,"MassiveIntentClassification (fr)":44.82,"MassiveIntentClassification (he)":23.65,"MassiveIntentClassification (hi)":17.98,"MassiveIntentClassification (hu)":38.0,"MassiveIntentClassification (hy)":8.69,"MassiveIntentClassification (id)":39.66,"MassiveIntentClassification (is)":35.14,"MassiveIntentClassification (it)":43.17,"MassiveIntentClassification (ja)":30.94,"MassiveIntentClassification (jv)":36.69,"MassiveIntentClassification (ka)":9.17,"MassiveIntentClassification (km)":4.99,"MassiveIntentClassification (kn)":3.08,"MassiveIntentClassification (ko)":19.97,"MassiveIntentClassification (lv)":38.61,"MassiveIntentClassification (ml)":2.85,"MassiveIntentClassification (mn)":23.25,"MassiveIntentClassification (ms)":36.21,"MassiveIntentClassification (my)":4.38,"MassiveIntentClassification (nl)":41.85,"MassiveIntentClassification (pt)":45.12,"MassiveIntentClassification (ro)":41.71,"MassiveIntentClassification (ru)":26.33,"MassiveIntentClassification (sl)":38.52,"MassiveIntentClassification (sq)":41.62,"MassiveIntentClassification (sw)":35.28,"MassiveIntentClassification (ta)":13.1,"MassiveIntentClassification (te)":2.56,"MassiveIntentClassification (th)":10.54,"MassiveIntentClassification (tl)":38.56,"MassiveIntentClassification (tr)":35.9,"MassiveIntentClassification (ur)":16.18,"MassiveIntentClassification (vi)":37.38,"MassiveIntentClassification (zh-TW)":22.39,"MassiveScenarioClassification (af)":45.71,"MassiveScenarioClassification (am)":7.41,"MassiveScenarioClassification (ar)":27.62,"MassiveScenarioClassification (az)":39.58,"MassiveScenarioClassification (bn)":18.98,"MassiveScenarioClassification (cy)":41.4,"MassiveScenarioClassification (de)":52.07,"MassiveScenarioClassification (el)":35.51,"MassiveScenarioClassification (es)":50.74,"MassiveScenarioClassification (fa)":29.0,"MassiveScenarioClassification 
(fi)":45.8,"MassiveScenarioClassification (fr)":53.76,"MassiveScenarioClassification (he)":25.68,"MassiveScenarioClassification (hi)":23.02,"MassiveScenarioClassification (hu)":44.09,"MassiveScenarioClassification (hy)":14.83,"MassiveScenarioClassification (id)":44.35,"MassiveScenarioClassification (is)":43.08,"MassiveScenarioClassification (it)":51.71,"MassiveScenarioClassification (ja)":36.75,"MassiveScenarioClassification (jv)":44.57,"MassiveScenarioClassification (ka)":14.84,"MassiveScenarioClassification (km)":9.75,"MassiveScenarioClassification (kn)":8.32,"MassiveScenarioClassification (ko)":25.72,"MassiveScenarioClassification (lv)":42.75,"MassiveScenarioClassification (ml)":7.25,"MassiveScenarioClassification (mn)":29.03,"MassiveScenarioClassification (ms)":44.65,"MassiveScenarioClassification (my)":10.07,"MassiveScenarioClassification (nl)":49.15,"MassiveScenarioClassification (pt)":53.0,"MassiveScenarioClassification (ro)":49.97,"MassiveScenarioClassification (ru)":28.75,"MassiveScenarioClassification (sl)":42.26,"MassiveScenarioClassification (sq)":49.14,"MassiveScenarioClassification (sw)":43.18,"MassiveScenarioClassification (ta)":19.38,"MassiveScenarioClassification (te)":7.74,"MassiveScenarioClassification (th)":18.32,"MassiveScenarioClassification (tl)":48.31,"MassiveScenarioClassification (tr)":41.79,"MassiveScenarioClassification (ur)":24.46,"MassiveScenarioClassification (vi)":40.94,"MassiveScenarioClassification (zh-TW)":31.16} -{"index":34,"Rank":18,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.78,"AmazonCounterfactualClassification (de)":54.33,"AmazonCounterfactualClassification (ja)":56.34,"AmazonReviewsClassification (de)":27.2,"AmazonReviewsClassification (es)":34.88,"AmazonReviewsClassification (fr)":31.56,"AmazonReviewsClassification (ja)":22.71,"AmazonReviewsClassification (zh)":22.35,"MTOPDomainClassification (de)":74.86,"MTOPDomainClassification (es)":77.09,"MTOPDomainClassification (fr)":79.8,"MTOPDomainClassification (hi)":32.79,"MTOPDomainClassification (th)":16.65,"MTOPIntentClassification (de)":42.36,"MTOPIntentClassification (es)":44.73,"MTOPIntentClassification (fr)":38.96,"MTOPIntentClassification (hi)":13.58,"MTOPIntentClassification (th)":5.4,"MassiveIntentClassification (af)":37.22,"MassiveIntentClassification (am)":3.19,"MassiveIntentClassification (ar)":14.26,"MassiveIntentClassification (az)":37.22,"MassiveIntentClassification (bn)":10.76,"MassiveIntentClassification (cy)":32.5,"MassiveIntentClassification (de)":42.78,"MassiveIntentClassification (el)":33.49,"MassiveIntentClassification (es)":44.45,"MassiveIntentClassification (fa)":26.74,"MassiveIntentClassification (fi)":38.1,"MassiveIntentClassification (fr)":46.89,"MassiveIntentClassification (he)":25.2,"MassiveIntentClassification (hi)":13.94,"MassiveIntentClassification (hu)":34.71,"MassiveIntentClassification (hy)":6.71,"MassiveIntentClassification (id)":38.57,"MassiveIntentClassification (is)":32.23,"MassiveIntentClassification (it)":45.8,"MassiveIntentClassification (ja)":29.19,"MassiveIntentClassification (jv)":34.22,"MassiveIntentClassification (ka)":8.89,"MassiveIntentClassification (km)":4.62,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":15.03,"MassiveIntentClassification (lv)":36.1,"MassiveIntentClassification (ml)":3.0,"MassiveIntentClassification (mn)":23.3,"MassiveIntentClassification (ms)":36.13,"MassiveIntentClassification (my)":3.81,"MassiveIntentClassification 
(nl)":41.08,"MassiveIntentClassification (pt)":45.2,"MassiveIntentClassification (ro)":39.49,"MassiveIntentClassification (ru)":31.82,"MassiveIntentClassification (sl)":35.45,"MassiveIntentClassification (sq)":36.89,"MassiveIntentClassification (sw)":37.54,"MassiveIntentClassification (ta)":7.91,"MassiveIntentClassification (te)":2.85,"MassiveIntentClassification (th)":10.5,"MassiveIntentClassification (tl)":39.47,"MassiveIntentClassification (tr)":37.5,"MassiveIntentClassification (ur)":16.11,"MassiveIntentClassification (vi)":36.11,"MassiveIntentClassification (zh-TW)":17.22,"MassiveScenarioClassification (af)":47.8,"MassiveScenarioClassification (am)":7.08,"MassiveScenarioClassification (ar)":22.83,"MassiveScenarioClassification (az)":44.95,"MassiveScenarioClassification (bn)":16.59,"MassiveScenarioClassification (cy)":37.92,"MassiveScenarioClassification (de)":58.74,"MassiveScenarioClassification (el)":43.0,"MassiveScenarioClassification (es)":54.47,"MassiveScenarioClassification (fa)":30.58,"MassiveScenarioClassification (fi)":43.57,"MassiveScenarioClassification (fr)":56.99,"MassiveScenarioClassification (he)":28.08,"MassiveScenarioClassification (hi)":18.1,"MassiveScenarioClassification (hu)":41.74,"MassiveScenarioClassification (hy)":11.54,"MassiveScenarioClassification (id)":46.95,"MassiveScenarioClassification (is)":42.78,"MassiveScenarioClassification (it)":54.65,"MassiveScenarioClassification (ja)":35.9,"MassiveScenarioClassification (jv)":42.51,"MassiveScenarioClassification (ka)":13.8,"MassiveScenarioClassification (km)":9.45,"MassiveScenarioClassification (kn)":8.16,"MassiveScenarioClassification (ko)":19.91,"MassiveScenarioClassification (lv)":40.48,"MassiveScenarioClassification (ml)":6.7,"MassiveScenarioClassification (mn)":28.55,"MassiveScenarioClassification (ms)":46.62,"MassiveScenarioClassification (my)":9.98,"MassiveScenarioClassification (nl)":51.76,"MassiveScenarioClassification (pt)":55.6,"MassiveScenarioClassification (ro)":50.54,"MassiveScenarioClassification (ru)":37.73,"MassiveScenarioClassification (sl)":41.67,"MassiveScenarioClassification (sq)":47.38,"MassiveScenarioClassification (sw)":44.18,"MassiveScenarioClassification (ta)":12.6,"MassiveScenarioClassification (te)":7.02,"MassiveScenarioClassification (th)":19.79,"MassiveScenarioClassification (tl)":50.36,"MassiveScenarioClassification (tr)":45.48,"MassiveScenarioClassification (ur)":23.68,"MassiveScenarioClassification (vi)":41.63,"MassiveScenarioClassification (zh-TW)":27.52} -{"index":101,"Rank":19,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":30.51,"AmazonCounterfactualClassification (de)":59.79,"AmazonCounterfactualClassification (ja)":50.59,"AmazonReviewsClassification (de)":35.06,"AmazonReviewsClassification (es)":37.18,"AmazonReviewsClassification (fr)":35.48,"AmazonReviewsClassification (ja)":22.24,"AmazonReviewsClassification (zh)":21.89,"MTOPDomainClassification (de)":85.42,"MTOPDomainClassification (es)":88.2,"MTOPDomainClassification (fr)":85.05,"MTOPDomainClassification (hi)":21.74,"MTOPDomainClassification (th)":15.87,"MTOPIntentClassification (de)":55.75,"MTOPIntentClassification (es)":57.73,"MTOPIntentClassification (fr)":51.07,"MTOPIntentClassification (hi)":3.19,"MTOPIntentClassification (th)":5.55,"MassiveIntentClassification (af)":42.6,"MassiveIntentClassification (am)":2.12,"MassiveIntentClassification (ar)":4.64,"MassiveIntentClassification (az)":35.05,"MassiveIntentClassification (bn)":2.84,"MassiveIntentClassification 
(cy)":36.19,"MassiveIntentClassification (de)":55.49,"MassiveIntentClassification (el)":10.14,"MassiveIntentClassification (es)":56.72,"MassiveIntentClassification (fa)":3.54,"MassiveIntentClassification (fi)":37.13,"MassiveIntentClassification (fr)":57.67,"MassiveIntentClassification (he)":2.56,"MassiveIntentClassification (hi)":3.24,"MassiveIntentClassification (hu)":34.22,"MassiveIntentClassification (hy)":3.01,"MassiveIntentClassification (id)":46.54,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":54.13,"MassiveIntentClassification (ja)":4.27,"MassiveIntentClassification (jv)":36.97,"MassiveIntentClassification (ka)":2.72,"MassiveIntentClassification (km)":5.35,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":2.64,"MassiveIntentClassification (lv)":36.32,"MassiveIntentClassification (ml)":3.18,"MassiveIntentClassification (mn)":22.85,"MassiveIntentClassification (ms)":42.87,"MassiveIntentClassification (my)":4.04,"MassiveIntentClassification (nl)":49.53,"MassiveIntentClassification (pt)":57.03,"MassiveIntentClassification (ro)":49.95,"MassiveIntentClassification (ru)":36.58,"MassiveIntentClassification (sl)":39.44,"MassiveIntentClassification (sq)":41.78,"MassiveIntentClassification (sw)":35.85,"MassiveIntentClassification (ta)":2.32,"MassiveIntentClassification (te)":2.2,"MassiveIntentClassification (th)":3.74,"MassiveIntentClassification (tl)":43.12,"MassiveIntentClassification (tr)":35.24,"MassiveIntentClassification (ur)":3.0,"MassiveIntentClassification (vi)":30.01,"MassiveIntentClassification (zh-TW)":3.35,"MassiveScenarioClassification (af)":52.54,"MassiveScenarioClassification (am)":6.3,"MassiveScenarioClassification (ar)":11.96,"MassiveScenarioClassification (az)":40.17,"MassiveScenarioClassification (bn)":8.29,"MassiveScenarioClassification (cy)":42.24,"MassiveScenarioClassification (de)":68.09,"MassiveScenarioClassification (el)":16.66,"MassiveScenarioClassification (es)":64.32,"MassiveScenarioClassification (fa)":6.9,"MassiveScenarioClassification (fi)":43.96,"MassiveScenarioClassification (fr)":66.72,"MassiveScenarioClassification (he)":7.51,"MassiveScenarioClassification (hi)":7.82,"MassiveScenarioClassification (hu)":42.16,"MassiveScenarioClassification (hy)":9.33,"MassiveScenarioClassification (id)":53.54,"MassiveScenarioClassification (is)":42.84,"MassiveScenarioClassification (it)":62.44,"MassiveScenarioClassification (ja)":7.29,"MassiveScenarioClassification (jv)":43.13,"MassiveScenarioClassification (ka)":7.63,"MassiveScenarioClassification (km)":9.08,"MassiveScenarioClassification (kn)":8.1,"MassiveScenarioClassification (ko)":6.35,"MassiveScenarioClassification (lv)":40.24,"MassiveScenarioClassification (ml)":7.65,"MassiveScenarioClassification (mn)":27.98,"MassiveScenarioClassification (ms)":52.41,"MassiveScenarioClassification (my)":9.21,"MassiveScenarioClassification (nl)":60.35,"MassiveScenarioClassification (pt)":62.78,"MassiveScenarioClassification (ro)":59.62,"MassiveScenarioClassification (ru)":43.44,"MassiveScenarioClassification (sl)":44.79,"MassiveScenarioClassification (sq)":50.84,"MassiveScenarioClassification (sw)":44.63,"MassiveScenarioClassification (ta)":7.95,"MassiveScenarioClassification (te)":7.5,"MassiveScenarioClassification (th)":8.79,"MassiveScenarioClassification (tl)":53.54,"MassiveScenarioClassification (tr)":42.47,"MassiveScenarioClassification (ur)":9.58,"MassiveScenarioClassification (vi)":34.68,"MassiveScenarioClassification (zh-TW)":8.77} 
-{"index":107,"Rank":20,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":29.6,"AmazonCounterfactualClassification (de)":67.01,"AmazonCounterfactualClassification (ja)":45.61,"AmazonReviewsClassification (de)":44.05,"AmazonReviewsClassification (es)":45.01,"AmazonReviewsClassification (fr)":43.52,"AmazonReviewsClassification (ja)":22.23,"AmazonReviewsClassification (zh)":21.88,"MTOPDomainClassification (de)":83.28,"MTOPDomainClassification (es)":85.32,"MTOPDomainClassification (fr)":85.14,"MTOPDomainClassification (hi)":20.85,"MTOPDomainClassification (th)":15.62,"MTOPIntentClassification (de)":54.65,"MTOPIntentClassification (es)":57.38,"MTOPIntentClassification (fr)":54.39,"MTOPIntentClassification (hi)":3.28,"MTOPIntentClassification (th)":5.08,"MassiveIntentClassification (af)":40.17,"MassiveIntentClassification (am)":2.18,"MassiveIntentClassification (ar)":4.18,"MassiveIntentClassification (az)":30.02,"MassiveIntentClassification (bn)":2.6,"MassiveIntentClassification (cy)":29.15,"MassiveIntentClassification (de)":57.43,"MassiveIntentClassification (el)":9.96,"MassiveIntentClassification (es)":57.97,"MassiveIntentClassification (fa)":3.6,"MassiveIntentClassification (fi)":34.02,"MassiveIntentClassification (fr)":60.99,"MassiveIntentClassification (he)":2.51,"MassiveIntentClassification (hi)":3.02,"MassiveIntentClassification (hu)":31.66,"MassiveIntentClassification (hy)":3.32,"MassiveIntentClassification (id)":41.53,"MassiveIntentClassification (is)":30.25,"MassiveIntentClassification (it)":56.57,"MassiveIntentClassification (ja)":3.5,"MassiveIntentClassification (jv)":31.67,"MassiveIntentClassification (ka)":2.79,"MassiveIntentClassification (km)":5.43,"MassiveIntentClassification (kn)":2.79,"MassiveIntentClassification (ko)":2.67,"MassiveIntentClassification (lv)":34.25,"MassiveIntentClassification (ml)":2.98,"MassiveIntentClassification (mn)":20.99,"MassiveIntentClassification (ms)":37.43,"MassiveIntentClassification (my)":4.02,"MassiveIntentClassification (nl)":50.51,"MassiveIntentClassification (pt)":57.95,"MassiveIntentClassification (ro)":49.37,"MassiveIntentClassification (ru)":33.46,"MassiveIntentClassification (sl)":36.33,"MassiveIntentClassification (sq)":37.65,"MassiveIntentClassification (sw)":30.6,"MassiveIntentClassification (ta)":1.79,"MassiveIntentClassification (te)":2.26,"MassiveIntentClassification (th)":4.02,"MassiveIntentClassification (tl)":38.92,"MassiveIntentClassification (tr)":32.05,"MassiveIntentClassification (ur)":2.7,"MassiveIntentClassification (vi)":21.47,"MassiveIntentClassification (zh-TW)":3.24,"MassiveScenarioClassification (af)":50.81,"MassiveScenarioClassification (am)":6.95,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.79,"MassiveScenarioClassification (bn)":8.0,"MassiveScenarioClassification (cy)":33.91,"MassiveScenarioClassification (de)":65.33,"MassiveScenarioClassification (el)":16.89,"MassiveScenarioClassification (es)":62.52,"MassiveScenarioClassification (fa)":6.08,"MassiveScenarioClassification (fi)":43.34,"MassiveScenarioClassification (fr)":66.42,"MassiveScenarioClassification (he)":7.55,"MassiveScenarioClassification (hi)":7.44,"MassiveScenarioClassification (hu)":40.85,"MassiveScenarioClassification (hy)":9.25,"MassiveScenarioClassification (id)":51.92,"MassiveScenarioClassification (is)":40.09,"MassiveScenarioClassification (it)":62.94,"MassiveScenarioClassification (ja)":7.9,"MassiveScenarioClassification 
(jv)":41.33,"MassiveScenarioClassification (ka)":7.76,"MassiveScenarioClassification (km)":9.19,"MassiveScenarioClassification (kn)":8.36,"MassiveScenarioClassification (ko)":6.13,"MassiveScenarioClassification (lv)":40.7,"MassiveScenarioClassification (ml)":6.98,"MassiveScenarioClassification (mn)":27.0,"MassiveScenarioClassification (ms)":46.9,"MassiveScenarioClassification (my)":9.55,"MassiveScenarioClassification (nl)":59.65,"MassiveScenarioClassification (pt)":62.18,"MassiveScenarioClassification (ro)":58.22,"MassiveScenarioClassification (ru)":40.73,"MassiveScenarioClassification (sl)":43.66,"MassiveScenarioClassification (sq)":49.25,"MassiveScenarioClassification (sw)":40.55,"MassiveScenarioClassification (ta)":7.46,"MassiveScenarioClassification (te)":7.03,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":51.74,"MassiveScenarioClassification (tr)":43.01,"MassiveScenarioClassification (ur)":9.61,"MassiveScenarioClassification (vi)":28.91,"MassiveScenarioClassification (zh-TW)":7.14} -{"index":100,"Rank":21,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.9,"AmazonCounterfactualClassification (de)":59.38,"AmazonCounterfactualClassification (ja)":45.87,"AmazonReviewsClassification (de)":33.06,"AmazonReviewsClassification (es)":34.0,"AmazonReviewsClassification (fr)":33.48,"AmazonReviewsClassification (ja)":21.78,"AmazonReviewsClassification (zh)":21.83,"MTOPDomainClassification (de)":81.91,"MTOPDomainClassification (es)":84.7,"MTOPDomainClassification (fr)":82.48,"MTOPDomainClassification (hi)":22.11,"MTOPDomainClassification (th)":16.36,"MTOPIntentClassification (de)":52.13,"MTOPIntentClassification (es)":52.62,"MTOPIntentClassification (fr)":46.39,"MTOPIntentClassification (hi)":3.9,"MTOPIntentClassification (th)":5.38,"MassiveIntentClassification (af)":41.02,"MassiveIntentClassification (am)":2.34,"MassiveIntentClassification (ar)":4.87,"MassiveIntentClassification (az)":34.92,"MassiveIntentClassification (bn)":2.52,"MassiveIntentClassification (cy)":35.87,"MassiveIntentClassification (de)":51.48,"MassiveIntentClassification (el)":10.0,"MassiveIntentClassification (es)":53.3,"MassiveIntentClassification (fa)":3.59,"MassiveIntentClassification (fi)":37.35,"MassiveIntentClassification (fr)":54.83,"MassiveIntentClassification (he)":2.52,"MassiveIntentClassification (hi)":2.88,"MassiveIntentClassification (hu)":33.52,"MassiveIntentClassification (hy)":3.13,"MassiveIntentClassification (id)":40.11,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":51.21,"MassiveIntentClassification (ja)":4.75,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.48,"MassiveIntentClassification (kn)":2.44,"MassiveIntentClassification (ko)":2.59,"MassiveIntentClassification (lv)":38.15,"MassiveIntentClassification (ml)":2.67,"MassiveIntentClassification (mn)":18.47,"MassiveIntentClassification (ms)":35.58,"MassiveIntentClassification (my)":4.35,"MassiveIntentClassification (nl)":45.96,"MassiveIntentClassification (pt)":52.27,"MassiveIntentClassification (ro)":46.39,"MassiveIntentClassification (ru)":16.82,"MassiveIntentClassification (sl)":37.3,"MassiveIntentClassification (sq)":41.73,"MassiveIntentClassification (sw)":35.97,"MassiveIntentClassification (ta)":1.52,"MassiveIntentClassification (te)":2.57,"MassiveIntentClassification (th)":3.94,"MassiveIntentClassification (tl)":41.03,"MassiveIntentClassification 
(tr)":33.75,"MassiveIntentClassification (ur)":2.57,"MassiveIntentClassification (vi)":25.23,"MassiveIntentClassification (zh-TW)":4.64,"MassiveScenarioClassification (af)":51.48,"MassiveScenarioClassification (am)":7.74,"MassiveScenarioClassification (ar)":12.03,"MassiveScenarioClassification (az)":41.77,"MassiveScenarioClassification (bn)":8.07,"MassiveScenarioClassification (cy)":43.67,"MassiveScenarioClassification (de)":63.63,"MassiveScenarioClassification (el)":16.83,"MassiveScenarioClassification (es)":61.48,"MassiveScenarioClassification (fa)":6.48,"MassiveScenarioClassification (fi)":43.54,"MassiveScenarioClassification (fr)":64.06,"MassiveScenarioClassification (he)":8.03,"MassiveScenarioClassification (hi)":7.5,"MassiveScenarioClassification (hu)":42.59,"MassiveScenarioClassification (hy)":9.22,"MassiveScenarioClassification (id)":48.67,"MassiveScenarioClassification (is)":43.87,"MassiveScenarioClassification (it)":59.83,"MassiveScenarioClassification (ja)":5.62,"MassiveScenarioClassification (jv)":42.18,"MassiveScenarioClassification (ka)":7.52,"MassiveScenarioClassification (km)":9.55,"MassiveScenarioClassification (kn)":8.34,"MassiveScenarioClassification (ko)":6.11,"MassiveScenarioClassification (lv)":43.35,"MassiveScenarioClassification (ml)":7.28,"MassiveScenarioClassification (mn)":23.94,"MassiveScenarioClassification (ms)":45.18,"MassiveScenarioClassification (my)":9.33,"MassiveScenarioClassification (nl)":57.02,"MassiveScenarioClassification (pt)":59.45,"MassiveScenarioClassification (ro)":56.8,"MassiveScenarioClassification (ru)":25.85,"MassiveScenarioClassification (sl)":42.51,"MassiveScenarioClassification (sq)":50.41,"MassiveScenarioClassification (sw)":43.02,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.9,"MassiveScenarioClassification (th)":8.7,"MassiveScenarioClassification (tl)":51.76,"MassiveScenarioClassification (tr)":42.54,"MassiveScenarioClassification (ur)":9.32,"MassiveScenarioClassification (vi)":31.51,"MassiveScenarioClassification (zh-TW)":8.16} -{"index":106,"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.26,"AmazonCounterfactualClassification (de)":67.97,"AmazonCounterfactualClassification (ja)":45.72,"AmazonReviewsClassification (de)":43.16,"AmazonReviewsClassification (es)":42.89,"AmazonReviewsClassification (fr)":41.48,"AmazonReviewsClassification (ja)":22.49,"AmazonReviewsClassification (zh)":22.12,"MTOPDomainClassification (de)":80.56,"MTOPDomainClassification (es)":80.78,"MTOPDomainClassification (fr)":79.6,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":15.82,"MTOPIntentClassification (de)":52.5,"MTOPIntentClassification (es)":52.07,"MTOPIntentClassification (fr)":47.73,"MTOPIntentClassification (hi)":3.74,"MTOPIntentClassification (th)":4.96,"MassiveIntentClassification (af)":38.41,"MassiveIntentClassification (am)":2.49,"MassiveIntentClassification (ar)":4.7,"MassiveIntentClassification (az)":31.77,"MassiveIntentClassification (bn)":2.77,"MassiveIntentClassification (cy)":31.69,"MassiveIntentClassification (de)":52.01,"MassiveIntentClassification (el)":9.74,"MassiveIntentClassification (es)":54.1,"MassiveIntentClassification (fa)":3.86,"MassiveIntentClassification (fi)":34.07,"MassiveIntentClassification (fr)":57.01,"MassiveIntentClassification (he)":2.14,"MassiveIntentClassification (hi)":2.97,"MassiveIntentClassification (hu)":32.01,"MassiveIntentClassification (hy)":3.17,"MassiveIntentClassification 
(id)":34.55,"MassiveIntentClassification (is)":32.0,"MassiveIntentClassification (it)":52.94,"MassiveIntentClassification (ja)":2.9,"MassiveIntentClassification (jv)":32.42,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.5,"MassiveIntentClassification (kn)":2.41,"MassiveIntentClassification (ko)":2.57,"MassiveIntentClassification (lv)":35.09,"MassiveIntentClassification (ml)":2.95,"MassiveIntentClassification (mn)":18.33,"MassiveIntentClassification (ms)":29.69,"MassiveIntentClassification (my)":3.99,"MassiveIntentClassification (nl)":44.95,"MassiveIntentClassification (pt)":51.96,"MassiveIntentClassification (ro)":43.83,"MassiveIntentClassification (ru)":17.32,"MassiveIntentClassification (sl)":33.71,"MassiveIntentClassification (sq)":37.62,"MassiveIntentClassification (sw)":31.9,"MassiveIntentClassification (ta)":1.91,"MassiveIntentClassification (te)":2.54,"MassiveIntentClassification (th)":3.85,"MassiveIntentClassification (tl)":36.83,"MassiveIntentClassification (tr)":33.0,"MassiveIntentClassification (ur)":2.62,"MassiveIntentClassification (vi)":22.81,"MassiveIntentClassification (zh-TW)":3.49,"MassiveScenarioClassification (af)":50.28,"MassiveScenarioClassification (am)":7.15,"MassiveScenarioClassification (ar)":12.12,"MassiveScenarioClassification (az)":39.68,"MassiveScenarioClassification (bn)":8.06,"MassiveScenarioClassification (cy)":38.01,"MassiveScenarioClassification (de)":62.71,"MassiveScenarioClassification (el)":17.19,"MassiveScenarioClassification (es)":59.56,"MassiveScenarioClassification (fa)":6.5,"MassiveScenarioClassification (fi)":41.72,"MassiveScenarioClassification (fr)":63.6,"MassiveScenarioClassification (he)":7.93,"MassiveScenarioClassification (hi)":7.85,"MassiveScenarioClassification (hu)":41.37,"MassiveScenarioClassification (hy)":9.42,"MassiveScenarioClassification (id)":44.88,"MassiveScenarioClassification (is)":40.86,"MassiveScenarioClassification (it)":60.09,"MassiveScenarioClassification (ja)":6.56,"MassiveScenarioClassification (jv)":40.18,"MassiveScenarioClassification (ka)":7.37,"MassiveScenarioClassification (km)":9.56,"MassiveScenarioClassification (kn)":8.4,"MassiveScenarioClassification (ko)":5.96,"MassiveScenarioClassification (lv)":41.44,"MassiveScenarioClassification (ml)":7.47,"MassiveScenarioClassification (mn)":25.36,"MassiveScenarioClassification (ms)":39.69,"MassiveScenarioClassification (my)":9.68,"MassiveScenarioClassification (nl)":56.09,"MassiveScenarioClassification (pt)":57.99,"MassiveScenarioClassification (ro)":56.0,"MassiveScenarioClassification (ru)":27.47,"MassiveScenarioClassification (sl)":41.04,"MassiveScenarioClassification (sq)":49.38,"MassiveScenarioClassification (sw)":40.62,"MassiveScenarioClassification (ta)":7.59,"MassiveScenarioClassification (te)":7.07,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":49.89,"MassiveScenarioClassification (tr)":43.08,"MassiveScenarioClassification (ur)":9.31,"MassiveScenarioClassification (vi)":27.46,"MassiveScenarioClassification (zh-TW)":7.24} -{"index":105,"Rank":23,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.73,"AmazonCounterfactualClassification (de)":69.98,"AmazonCounterfactualClassification (ja)":46.05,"AmazonReviewsClassification (de)":37.9,"AmazonReviewsClassification (es)":37.33,"AmazonReviewsClassification (fr)":37.35,"AmazonReviewsClassification (ja)":22.29,"AmazonReviewsClassification (zh)":21.53,"MTOPDomainClassification 
(de)":76.98,"MTOPDomainClassification (es)":73.61,"MTOPDomainClassification (fr)":75.03,"MTOPDomainClassification (hi)":21.4,"MTOPDomainClassification (th)":16.21,"MTOPIntentClassification (de)":44.43,"MTOPIntentClassification (es)":42.03,"MTOPIntentClassification (fr)":43.85,"MTOPIntentClassification (hi)":3.8,"MTOPIntentClassification (th)":5.21,"MassiveIntentClassification (af)":34.32,"MassiveIntentClassification (am)":2.38,"MassiveIntentClassification (ar)":4.53,"MassiveIntentClassification (az)":31.76,"MassiveIntentClassification (bn)":2.58,"MassiveIntentClassification (cy)":28.94,"MassiveIntentClassification (de)":45.23,"MassiveIntentClassification (el)":10.05,"MassiveIntentClassification (es)":45.32,"MassiveIntentClassification (fa)":3.58,"MassiveIntentClassification (fi)":33.52,"MassiveIntentClassification (fr)":51.13,"MassiveIntentClassification (he)":2.63,"MassiveIntentClassification (hi)":2.68,"MassiveIntentClassification (hu)":32.31,"MassiveIntentClassification (hy)":3.33,"MassiveIntentClassification (id)":35.5,"MassiveIntentClassification (is)":29.82,"MassiveIntentClassification (it)":45.59,"MassiveIntentClassification (ja)":3.67,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":2.77,"MassiveIntentClassification (km)":5.66,"MassiveIntentClassification (kn)":2.59,"MassiveIntentClassification (ko)":2.34,"MassiveIntentClassification (lv)":33.97,"MassiveIntentClassification (ml)":2.55,"MassiveIntentClassification (mn)":14.7,"MassiveIntentClassification (ms)":33.12,"MassiveIntentClassification (my)":4.42,"MassiveIntentClassification (nl)":37.96,"MassiveIntentClassification (pt)":43.35,"MassiveIntentClassification (ro)":42.69,"MassiveIntentClassification (ru)":14.82,"MassiveIntentClassification (sl)":34.54,"MassiveIntentClassification (sq)":38.54,"MassiveIntentClassification (sw)":32.14,"MassiveIntentClassification (ta)":1.41,"MassiveIntentClassification (te)":2.5,"MassiveIntentClassification (th)":3.71,"MassiveIntentClassification (tl)":36.04,"MassiveIntentClassification (tr)":33.77,"MassiveIntentClassification (ur)":2.99,"MassiveIntentClassification (vi)":22.62,"MassiveIntentClassification (zh-TW)":4.63,"MassiveScenarioClassification (af)":44.45,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.41,"MassiveScenarioClassification (bn)":8.45,"MassiveScenarioClassification (cy)":35.04,"MassiveScenarioClassification (de)":59.12,"MassiveScenarioClassification (el)":17.68,"MassiveScenarioClassification (es)":55.61,"MassiveScenarioClassification (fa)":6.86,"MassiveScenarioClassification (fi)":41.34,"MassiveScenarioClassification (fr)":59.92,"MassiveScenarioClassification (he)":7.86,"MassiveScenarioClassification (hi)":7.63,"MassiveScenarioClassification (hu)":41.31,"MassiveScenarioClassification (hy)":9.23,"MassiveScenarioClassification (id)":44.64,"MassiveScenarioClassification (is)":39.63,"MassiveScenarioClassification (it)":54.58,"MassiveScenarioClassification (ja)":4.96,"MassiveScenarioClassification (jv)":40.73,"MassiveScenarioClassification (ka)":7.51,"MassiveScenarioClassification (km)":8.73,"MassiveScenarioClassification (kn)":7.99,"MassiveScenarioClassification (ko)":6.03,"MassiveScenarioClassification (lv)":36.42,"MassiveScenarioClassification (ml)":6.96,"MassiveScenarioClassification (mn)":19.85,"MassiveScenarioClassification (ms)":43.18,"MassiveScenarioClassification (my)":9.46,"MassiveScenarioClassification (nl)":50.0,"MassiveScenarioClassification 
(pt)":52.24,"MassiveScenarioClassification (ro)":53.7,"MassiveScenarioClassification (ru)":20.69,"MassiveScenarioClassification (sl)":39.79,"MassiveScenarioClassification (sq)":50.16,"MassiveScenarioClassification (sw)":40.48,"MassiveScenarioClassification (ta)":7.47,"MassiveScenarioClassification (te)":6.87,"MassiveScenarioClassification (th)":8.26,"MassiveScenarioClassification (tl)":48.94,"MassiveScenarioClassification (tr)":41.83,"MassiveScenarioClassification (ur)":9.77,"MassiveScenarioClassification (vi)":30.01,"MassiveScenarioClassification (zh-TW)":7.91} -{"index":98,"Rank":24,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":24.49,"AmazonCounterfactualClassification (de)":54.46,"AmazonCounterfactualClassification (ja)":43.87,"AmazonReviewsClassification (de)":24.08,"AmazonReviewsClassification (es)":23.88,"AmazonReviewsClassification (fr)":23.31,"AmazonReviewsClassification (ja)":20.25,"AmazonReviewsClassification (zh)":20.49,"MTOPDomainClassification (de)":48.55,"MTOPDomainClassification (es)":58.39,"MTOPDomainClassification (fr)":54.61,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":14.98,"MTOPIntentClassification (de)":35.55,"MTOPIntentClassification (es)":36.72,"MTOPIntentClassification (fr)":34.71,"MTOPIntentClassification (hi)":4.44,"MTOPIntentClassification (th)":4.67,"MassiveIntentClassification (af)":33.68,"MassiveIntentClassification (am)":2.94,"MassiveIntentClassification (ar)":10.04,"MassiveIntentClassification (az)":30.74,"MassiveIntentClassification (bn)":3.02,"MassiveIntentClassification (cy)":33.94,"MassiveIntentClassification (de)":36.06,"MassiveIntentClassification (el)":27.7,"MassiveIntentClassification (es)":35.6,"MassiveIntentClassification (fa)":17.97,"MassiveIntentClassification (fi)":35.53,"MassiveIntentClassification (fr)":38.41,"MassiveIntentClassification (he)":2.69,"MassiveIntentClassification (hi)":3.43,"MassiveIntentClassification (hu)":34.05,"MassiveIntentClassification (hy)":3.11,"MassiveIntentClassification (id)":40.02,"MassiveIntentClassification (is)":32.63,"MassiveIntentClassification (it)":39.28,"MassiveIntentClassification (ja)":4.95,"MassiveIntentClassification (jv)":34.95,"MassiveIntentClassification (ka)":2.57,"MassiveIntentClassification (km)":4.73,"MassiveIntentClassification (kn)":3.54,"MassiveIntentClassification (ko)":2.68,"MassiveIntentClassification (lv)":37.91,"MassiveIntentClassification (ml)":2.88,"MassiveIntentClassification (mn)":16.94,"MassiveIntentClassification (ms)":36.6,"MassiveIntentClassification (my)":3.96,"MassiveIntentClassification (nl)":33.95,"MassiveIntentClassification (pt)":43.05,"MassiveIntentClassification (ro)":36.2,"MassiveIntentClassification (ru)":25.3,"MassiveIntentClassification (sl)":35.9,"MassiveIntentClassification (sq)":36.6,"MassiveIntentClassification (sw)":34.81,"MassiveIntentClassification (ta)":3.11,"MassiveIntentClassification (te)":2.53,"MassiveIntentClassification (th)":4.38,"MassiveIntentClassification (tl)":35.51,"MassiveIntentClassification (tr)":32.02,"MassiveIntentClassification (ur)":9.61,"MassiveIntentClassification (vi)":37.07,"MassiveIntentClassification (zh-TW)":4.79,"MassiveScenarioClassification (af)":36.17,"MassiveScenarioClassification (am)":7.64,"MassiveScenarioClassification (ar)":15.26,"MassiveScenarioClassification (az)":30.73,"MassiveScenarioClassification (bn)":7.15,"MassiveScenarioClassification (cy)":34.73,"MassiveScenarioClassification (de)":38.62,"MassiveScenarioClassification 
(el)":27.18,"MassiveScenarioClassification (es)":39.44,"MassiveScenarioClassification (fa)":21.43,"MassiveScenarioClassification (fi)":33.21,"MassiveScenarioClassification (fr)":40.26,"MassiveScenarioClassification (he)":7.42,"MassiveScenarioClassification (hi)":8.06,"MassiveScenarioClassification (hu)":34.54,"MassiveScenarioClassification (hy)":8.61,"MassiveScenarioClassification (id)":40.04,"MassiveScenarioClassification (is)":33.57,"MassiveScenarioClassification (it)":40.1,"MassiveScenarioClassification (ja)":9.96,"MassiveScenarioClassification (jv)":36.11,"MassiveScenarioClassification (ka)":7.13,"MassiveScenarioClassification (km)":9.66,"MassiveScenarioClassification (kn)":7.55,"MassiveScenarioClassification (ko)":7.27,"MassiveScenarioClassification (lv)":37.03,"MassiveScenarioClassification (ml)":7.22,"MassiveScenarioClassification (mn)":21.53,"MassiveScenarioClassification (ms)":37.57,"MassiveScenarioClassification (my)":9.54,"MassiveScenarioClassification (nl)":34.62,"MassiveScenarioClassification (pt)":44.68,"MassiveScenarioClassification (ro)":37.29,"MassiveScenarioClassification (ru)":28.16,"MassiveScenarioClassification (sl)":37.95,"MassiveScenarioClassification (sq)":37.82,"MassiveScenarioClassification (sw)":35.37,"MassiveScenarioClassification (ta)":7.19,"MassiveScenarioClassification (te)":7.29,"MassiveScenarioClassification (th)":9.47,"MassiveScenarioClassification (tl)":37.31,"MassiveScenarioClassification (tr)":34.57,"MassiveScenarioClassification (ur)":16.17,"MassiveScenarioClassification (vi)":35.91,"MassiveScenarioClassification (zh-TW)":10.19} -{"index":0,"Rank":25,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.59,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.05,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":66.09,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.83,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.71,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":1,"Rank":26,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.26,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":79.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":45.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification 
(fi)":"","MassiveIntentClassification (fr)":53.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":62.46,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":2,"Rank":27,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.15,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification 
(de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.68,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":63.08,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.15,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":3,"Rank":28,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.98,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.12,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification 
(is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":4,"Rank":29,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.36,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.52,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":68.06,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification 
(vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":6,"Rank":30,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.18,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification 
(kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":7,"Rank":31,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.07,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":75.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification 
(az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.03,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.3,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":8,"Rank":32,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, 
fp32)":26.45,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":52.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":9,"Rank":33,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":10,"Rank":34,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification 
(sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":11,"Rank":35,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.81,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification 
(hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":12,"Rank":36,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.8,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification 
(de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification 
(tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":13,"Rank":37,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.15,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification 
(km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":14,"Rank":38,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification 
(az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":15,"Rank":39,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.38,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":16,"Rank":40,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.19,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.0,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":97.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":93.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification 
(fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":79.6,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":82.18,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":17,"Rank":41,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification 
(ja)":"","AmazonReviewsClassification (zh)":35.91,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":18,"Rank":42,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.5,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification 
(is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":19,"Rank":43,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":54.11,"AmazonCounterfactualClassification (ja)":53.95,"AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.68,"MTOPDomainClassification (de)":57.22,"MTOPDomainClassification (es)":58.4,"MTOPDomainClassification (fr)":54.97,"MTOPDomainClassification (hi)":64.17,"MTOPDomainClassification (th)":70.47,"MTOPIntentClassification (de)":32.18,"MTOPIntentClassification (es)":33.9,"MTOPIntentClassification (fr)":26.69,"MTOPIntentClassification (hi)":38.27,"MTOPIntentClassification (th)":42.73,"MassiveIntentClassification (af)":33.5,"MassiveIntentClassification (am)":19.56,"MassiveIntentClassification (ar)":26.46,"MassiveIntentClassification (az)":31.58,"MassiveIntentClassification (bn)":27.99,"MassiveIntentClassification (cy)":28.26,"MassiveIntentClassification (de)":34.2,"MassiveIntentClassification (el)":26.02,"MassiveIntentClassification (es)":36.37,"MassiveIntentClassification (fa)":48.91,"MassiveIntentClassification (fi)":30.11,"MassiveIntentClassification (fr)":37.53,"MassiveIntentClassification (he)":24.86,"MassiveIntentClassification (hi)":39.14,"MassiveIntentClassification (hu)":31.97,"MassiveIntentClassification (hy)":31.36,"MassiveIntentClassification (id)":37.04,"MassiveIntentClassification (is)":28.61,"MassiveIntentClassification (it)":37.86,"MassiveIntentClassification (ja)":47.9,"MassiveIntentClassification (jv)":29.08,"MassiveIntentClassification (ka)":25.77,"MassiveIntentClassification (km)":23.66,"MassiveIntentClassification (kn)":21.27,"MassiveIntentClassification (ko)":40.42,"MassiveIntentClassification (lv)":30.13,"MassiveIntentClassification (ml)":25.89,"MassiveIntentClassification (mn)":27.71,"MassiveIntentClassification (ms)":33.04,"MassiveIntentClassification (my)":24.19,"MassiveIntentClassification (nl)":39.31,"MassiveIntentClassification (pt)":40.26,"MassiveIntentClassification (ro)":35.42,"MassiveIntentClassification (ru)":39.69,"MassiveIntentClassification (sl)":31.09,"MassiveIntentClassification (sq)":35.15,"MassiveIntentClassification (sw)":27.91,"MassiveIntentClassification (ta)":28.12,"MassiveIntentClassification (te)":26.34,"MassiveIntentClassification 
(th)":48.24,"MassiveIntentClassification (tl)":32.73,"MassiveIntentClassification (tr)":30.21,"MassiveIntentClassification (ur)":30.28,"MassiveIntentClassification (vi)":40.45,"MassiveIntentClassification (zh-TW)":64.03,"MassiveScenarioClassification (af)":43.53,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":34.91,"MassiveScenarioClassification (az)":36.37,"MassiveScenarioClassification (bn)":39.2,"MassiveScenarioClassification (cy)":32.18,"MassiveScenarioClassification (de)":43.92,"MassiveScenarioClassification (el)":35.03,"MassiveScenarioClassification (es)":41.96,"MassiveScenarioClassification (fa)":58.36,"MassiveScenarioClassification (fi)":33.95,"MassiveScenarioClassification (fr)":45.32,"MassiveScenarioClassification (he)":34.06,"MassiveScenarioClassification (hi)":48.77,"MassiveScenarioClassification (hu)":39.92,"MassiveScenarioClassification (hy)":38.09,"MassiveScenarioClassification (id)":45.08,"MassiveScenarioClassification (is)":36.55,"MassiveScenarioClassification (it)":44.38,"MassiveScenarioClassification (ja)":57.02,"MassiveScenarioClassification (jv)":35.51,"MassiveScenarioClassification (ka)":33.41,"MassiveScenarioClassification (km)":30.9,"MassiveScenarioClassification (kn)":26.83,"MassiveScenarioClassification (ko)":49.52,"MassiveScenarioClassification (lv)":34.02,"MassiveScenarioClassification (ml)":34.55,"MassiveScenarioClassification (mn)":34.14,"MassiveScenarioClassification (ms)":42.71,"MassiveScenarioClassification (my)":31.0,"MassiveScenarioClassification (nl)":51.44,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":45.01,"MassiveScenarioClassification (ru)":48.66,"MassiveScenarioClassification (sl)":38.34,"MassiveScenarioClassification (sq)":44.78,"MassiveScenarioClassification (sw)":36.02,"MassiveScenarioClassification (ta)":37.81,"MassiveScenarioClassification (te)":34.6,"MassiveScenarioClassification (th)":57.38,"MassiveScenarioClassification (tl)":39.36,"MassiveScenarioClassification (tr)":36.16,"MassiveScenarioClassification (ur)":36.43,"MassiveScenarioClassification (vi)":47.04,"MassiveScenarioClassification (zh-TW)":71.96} -{"index":20,"Rank":44,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":42.04,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":25.8,"MassiveIntentClassification (am)":3.34,"MassiveIntentClassification (ar)":6.49,"MassiveIntentClassification (az)":24.77,"MassiveIntentClassification (bn)":4.3,"MassiveIntentClassification (cy)":26.39,"MassiveIntentClassification (de)":28.09,"MassiveIntentClassification (el)":19.24,"MassiveIntentClassification (es)":30.62,"MassiveIntentClassification (fa)":7.21,"MassiveIntentClassification (fi)":27.21,"MassiveIntentClassification (fr)":32.64,"MassiveIntentClassification (he)":2.66,"MassiveIntentClassification (hi)":4.59,"MassiveIntentClassification (hu)":25.65,"MassiveIntentClassification 
(hy)":4.86,"MassiveIntentClassification (id)":29.81,"MassiveIntentClassification (is)":23.53,"MassiveIntentClassification (it)":34.47,"MassiveIntentClassification (ja)":39.4,"MassiveIntentClassification (jv)":28.75,"MassiveIntentClassification (ka)":4.34,"MassiveIntentClassification (km)":6.1,"MassiveIntentClassification (kn)":4.46,"MassiveIntentClassification (ko)":14.16,"MassiveIntentClassification (lv)":29.86,"MassiveIntentClassification (ml)":3.69,"MassiveIntentClassification (mn)":7.86,"MassiveIntentClassification (ms)":28.05,"MassiveIntentClassification (my)":6.98,"MassiveIntentClassification (nl)":32.92,"MassiveIntentClassification (pt)":33.53,"MassiveIntentClassification (ro)":31.32,"MassiveIntentClassification (ru)":11.27,"MassiveIntentClassification (sl)":27.94,"MassiveIntentClassification (sq)":32.9,"MassiveIntentClassification (sw)":29.4,"MassiveIntentClassification (ta)":3.33,"MassiveIntentClassification (te)":3.46,"MassiveIntentClassification (th)":12.98,"MassiveIntentClassification (tl)":30.73,"MassiveIntentClassification (tr)":23.57,"MassiveIntentClassification (ur)":4.98,"MassiveIntentClassification (vi)":21.89,"MassiveIntentClassification (zh-TW)":65.53,"MassiveScenarioClassification (af)":31.55,"MassiveScenarioClassification (am)":7.49,"MassiveScenarioClassification (ar)":15.0,"MassiveScenarioClassification (az)":29.13,"MassiveScenarioClassification (bn)":9.24,"MassiveScenarioClassification (cy)":29.72,"MassiveScenarioClassification (de)":34.68,"MassiveScenarioClassification (el)":28.83,"MassiveScenarioClassification (es)":35.97,"MassiveScenarioClassification (fa)":11.12,"MassiveScenarioClassification (fi)":28.61,"MassiveScenarioClassification (fr)":40.66,"MassiveScenarioClassification (he)":9.01,"MassiveScenarioClassification (hi)":9.92,"MassiveScenarioClassification (hu)":32.07,"MassiveScenarioClassification (hy)":8.44,"MassiveScenarioClassification (id)":34.9,"MassiveScenarioClassification (is)":30.95,"MassiveScenarioClassification (it)":41.06,"MassiveScenarioClassification (ja)":48.73,"MassiveScenarioClassification (jv)":35.09,"MassiveScenarioClassification (ka)":9.29,"MassiveScenarioClassification (km)":11.19,"MassiveScenarioClassification (kn)":10.1,"MassiveScenarioClassification (ko)":19.2,"MassiveScenarioClassification (lv)":32.49,"MassiveScenarioClassification (ml)":6.37,"MassiveScenarioClassification (mn)":13.08,"MassiveScenarioClassification (ms)":39.18,"MassiveScenarioClassification (my)":12.25,"MassiveScenarioClassification (nl)":38.17,"MassiveScenarioClassification (pt)":40.01,"MassiveScenarioClassification (ro)":39.25,"MassiveScenarioClassification (ru)":16.71,"MassiveScenarioClassification (sl)":33.94,"MassiveScenarioClassification (sq)":40.4,"MassiveScenarioClassification (sw)":37.14,"MassiveScenarioClassification (ta)":8.21,"MassiveScenarioClassification (te)":7.97,"MassiveScenarioClassification (th)":21.56,"MassiveScenarioClassification (tl)":36.7,"MassiveScenarioClassification (tr)":28.8,"MassiveScenarioClassification (ur)":10.46,"MassiveScenarioClassification (vi)":27.72,"MassiveScenarioClassification (zh-TW)":71.52} -{"index":21,"Rank":45,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.32,"MTOPDomainClassification 
(de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":28.81,"MassiveIntentClassification (am)":3.04,"MassiveIntentClassification (ar)":6.75,"MassiveIntentClassification (az)":27.0,"MassiveIntentClassification (bn)":3.24,"MassiveIntentClassification (cy)":31.18,"MassiveIntentClassification (de)":30.65,"MassiveIntentClassification (el)":18.17,"MassiveIntentClassification (es)":32.53,"MassiveIntentClassification (fa)":8.72,"MassiveIntentClassification (fi)":31.79,"MassiveIntentClassification (fr)":33.16,"MassiveIntentClassification (he)":3.03,"MassiveIntentClassification (hi)":3.61,"MassiveIntentClassification (hu)":30.47,"MassiveIntentClassification (hy)":5.35,"MassiveIntentClassification (id)":32.45,"MassiveIntentClassification (is)":30.12,"MassiveIntentClassification (it)":36.32,"MassiveIntentClassification (ja)":41.09,"MassiveIntentClassification (jv)":30.42,"MassiveIntentClassification (ka)":3.79,"MassiveIntentClassification (km)":6.79,"MassiveIntentClassification (kn)":3.86,"MassiveIntentClassification (ko)":8.82,"MassiveIntentClassification (lv)":30.23,"MassiveIntentClassification (ml)":2.93,"MassiveIntentClassification (mn)":12.61,"MassiveIntentClassification (ms)":30.66,"MassiveIntentClassification (my)":5.85,"MassiveIntentClassification (nl)":34.1,"MassiveIntentClassification (pt)":36.92,"MassiveIntentClassification (ro)":33.01,"MassiveIntentClassification (ru)":10.4,"MassiveIntentClassification (sl)":30.73,"MassiveIntentClassification (sq)":36.98,"MassiveIntentClassification (sw)":31.62,"MassiveIntentClassification (ta)":3.19,"MassiveIntentClassification (te)":2.59,"MassiveIntentClassification (th)":4.61,"MassiveIntentClassification (tl)":32.55,"MassiveIntentClassification (tr)":26.87,"MassiveIntentClassification (ur)":4.23,"MassiveIntentClassification (vi)":29.24,"MassiveIntentClassification (zh-TW)":65.49,"MassiveScenarioClassification (af)":35.41,"MassiveScenarioClassification (am)":9.05,"MassiveScenarioClassification (ar)":14.92,"MassiveScenarioClassification (az)":31.97,"MassiveScenarioClassification (bn)":9.15,"MassiveScenarioClassification (cy)":37.45,"MassiveScenarioClassification (de)":38.33,"MassiveScenarioClassification (el)":24.45,"MassiveScenarioClassification (es)":37.73,"MassiveScenarioClassification (fa)":11.84,"MassiveScenarioClassification (fi)":34.49,"MassiveScenarioClassification (fr)":40.92,"MassiveScenarioClassification (he)":7.64,"MassiveScenarioClassification (hi)":8.64,"MassiveScenarioClassification (hu)":37.25,"MassiveScenarioClassification (hy)":10.91,"MassiveScenarioClassification (id)":36.11,"MassiveScenarioClassification (is)":37.8,"MassiveScenarioClassification (it)":41.68,"MassiveScenarioClassification (ja)":48.38,"MassiveScenarioClassification (jv)":35.2,"MassiveScenarioClassification (ka)":9.9,"MassiveScenarioClassification (km)":12.75,"MassiveScenarioClassification (kn)":10.31,"MassiveScenarioClassification (ko)":14.52,"MassiveScenarioClassification (lv)":33.08,"MassiveScenarioClassification (ml)":7.44,"MassiveScenarioClassification (mn)":17.98,"MassiveScenarioClassification (ms)":37.93,"MassiveScenarioClassification (my)":11.73,"MassiveScenarioClassification (nl)":40.37,"MassiveScenarioClassification (pt)":41.83,"MassiveScenarioClassification 
(ro)":40.63,"MassiveScenarioClassification (ru)":18.96,"MassiveScenarioClassification (sl)":35.3,"MassiveScenarioClassification (sq)":41.96,"MassiveScenarioClassification (sw)":38.88,"MassiveScenarioClassification (ta)":8.51,"MassiveScenarioClassification (te)":7.35,"MassiveScenarioClassification (th)":10.1,"MassiveScenarioClassification (tl)":35.91,"MassiveScenarioClassification (tr)":32.08,"MassiveScenarioClassification (ur)":10.37,"MassiveScenarioClassification (vi)":33.91,"MassiveScenarioClassification (zh-TW)":71.0} -{"index":22,"Rank":46,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":38.6,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":80.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":50.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.31,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.5,"MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":23,"Rank":47,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.23,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":61.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification 
(ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":24,"Rank":48,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.93,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification 
(is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":25,"Rank":49,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.88,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification 
(fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification 
(vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":26,"Rank":50,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.51,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification 
(ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":27,"Rank":51,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification 
(cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":28,"Rank":52,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":29,"Rank":53,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification 
(he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":30,"Rank":54,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":33.77,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification 
(fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification 
(te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":31,"Rank":55,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.38,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.65,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.87,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification 
(ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":32,"Rank":56,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification 
(af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":33,"Rank":57,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.63,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.86,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification 
(lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":35,"Rank":58,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":40.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.13,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification 
(cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.99,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":36,"Rank":59,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification 
(ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.42,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.81,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.99,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification 
(pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":37,"Rank":60,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":44.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.82,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.63,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.14,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.74,"MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":38,"Rank":61,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":84.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.35,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.57,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.04,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":41,"Rank":62,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":66.64,"AmazonCounterfactualClassification (ja)":58.06,"AmazonReviewsClassification (de)":35.29,"AmazonReviewsClassification (es)":38.34,"AmazonReviewsClassification (fr)":37.84,"AmazonReviewsClassification (ja)":30.94,"AmazonReviewsClassification (zh)":33.75,"MTOPDomainClassification (de)":84.54,"MTOPDomainClassification (es)":86.46,"MTOPDomainClassification (fr)":81.32,"MTOPDomainClassification (hi)":58.23,"MTOPDomainClassification (th)":72.29,"MTOPIntentClassification (de)":60.52,"MTOPIntentClassification (es)":64.32,"MTOPIntentClassification (fr)":58.67,"MTOPIntentClassification (hi)":41.96,"MTOPIntentClassification (th)":55.28,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification 
(is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":42,"Rank":63,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.08,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.26,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification 
(fr)":68.55,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":67.4,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification 
(vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":43,"Rank":64,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification 
(ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":44,"Rank":65,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.59,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification 
(cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":45,"Rank":66,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.79,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.12,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":59.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":65.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":47,"Rank":67,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.54,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification 
(fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":48,"Rank":68,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":39.29,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification 
(zh)":37.63,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":83.8,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.36,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":50,"Rank":69,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification 
(hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":51,"Rank":70,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.03,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":77.1,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":43.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.59,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification 
(tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":61.28,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":52,"Rank":71,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.97,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification 
(ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":53,"Rank":72,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification 
(am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":54,"Rank":73,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.59,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":56,"Rank":74,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":24.9,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":25.55,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.49,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.98,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":11.41,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":57,"Rank":75,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":23.52,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":27.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":8.61,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.24,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification 
(sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":10.98,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":58,"Rank":76,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":22.45,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":24.27,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.79,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":16.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification 
(hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":22.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":59,"Rank":77,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.61,"MTOPDomainClassification 
(hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.84,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification 
(th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":60,"Rank":78,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.02,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":64.49,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":39.4,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":38.01,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":43.63,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification 
(jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":61,"Rank":79,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification 
(am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":62,"Rank":80,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification 
(mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":63,"Rank":81,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.64,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification 
(el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":64,"Rank":82,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification 
(es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.34,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification 
(ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":65,"Rank":83,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.57,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification 
(hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":66,"Rank":84,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification 
(tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":74,"Rank":85,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":68.92,"AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":37.72,"AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":88.37,"MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":63.83,"MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":63.89,"MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification 
(jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":71.25,"MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":75,"Rank":86,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":38.68,"AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":89.89,"MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":68.76,"MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification 
(af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":66.93,"MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":71.23,"MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} 
-{"index":76,"Rank":87,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":77,"Rank":88,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":78,"Rank":89,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":50.07,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification 
(ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":79,"Rank":90,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification 
(he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":80,"Rank":91,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.33,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification 
(fr)":90.39,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.88,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.58,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification 
(te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":81,"Rank":92,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.69,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.86,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification 
(ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":82,"Rank":93,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.96,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification 
(af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.7,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":83,"Rank":94,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":65.93,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.48,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification 
(lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":84,"Rank":95,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.59,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification 
(cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.67,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.61,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":85,"Rank":96,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification 
(de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.02,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification 
(ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":86,"Rank":97,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification 
(hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":87,"Rank":98,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.79,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification 
(th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":88,"Rank":99,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification 
(ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":89,"Rank":100,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification 
(th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} 
-{"index":90,"Rank":101,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":55.06,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":63.04,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":91,"Rank":102,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":22.3,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":27.41,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":92,"Rank":103,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":38.88,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":43.3,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":93,"Rank":104,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.24,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification 
(fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":94,"Rank":105,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.33,"MTOPDomainClassification (de)":"","MTOPDomainClassification 
(es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification 
(te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":95,"Rank":106,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification 
(ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":102,"Rank":107,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":27.05,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":72.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.18,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":42.64,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification 
(af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":103,"Rank":108,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":68.35,"AmazonCounterfactualClassification (ja)":63.45,"AmazonReviewsClassification (de)":35.91,"AmazonReviewsClassification (es)":37.49,"AmazonReviewsClassification (fr)":35.3,"AmazonReviewsClassification (ja)":33.24,"AmazonReviewsClassification (zh)":35.26,"MTOPDomainClassification (de)":79.2,"MTOPDomainClassification (es)":83.04,"MTOPDomainClassification (fr)":78.63,"MTOPDomainClassification (hi)":81.36,"MTOPDomainClassification (th)":79.99,"MTOPIntentClassification (de)":54.23,"MTOPIntentClassification (es)":60.28,"MTOPIntentClassification (fr)":54.05,"MTOPIntentClassification (hi)":59.9,"MTOPIntentClassification (th)":61.96,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":57.52,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification 
(kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":64.52,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":104,"Rank":109,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":69.95,"AmazonCounterfactualClassification (ja)":69.79,"AmazonReviewsClassification (de)":39.52,"AmazonReviewsClassification (es)":39.99,"AmazonReviewsClassification (fr)":39.0,"AmazonReviewsClassification (ja)":36.64,"AmazonReviewsClassification (zh)":37.74,"MTOPDomainClassification (de)":85.73,"MTOPDomainClassification (es)":86.96,"MTOPDomainClassification (fr)":81.21,"MTOPDomainClassification (hi)":84.76,"MTOPDomainClassification (th)":82.51,"MTOPIntentClassification (de)":61.27,"MTOPIntentClassification (es)":66.59,"MTOPIntentClassification (fr)":59.76,"MTOPIntentClassification (hi)":62.37,"MTOPIntentClassification (th)":64.8,"MassiveIntentClassification (af)":"","MassiveIntentClassification 
(am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.88,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.9,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":108,"Rank":110,"Model":"sentence-t5-xxl<\/a>","Model Size (Million 
Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":46.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.33,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.91,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":68.53,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification 
(mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":109,"Rank":111,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":29.75,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification 
(es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":110,"Rank":112,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":29.85,"MassiveIntentClassification (am)":2.13,"MassiveIntentClassification (ar)":3.41,"MassiveIntentClassification (az)":22.73,"MassiveIntentClassification (bn)":2.87,"MassiveIntentClassification (cy)":29.43,"MassiveIntentClassification (de)":30.85,"MassiveIntentClassification (el)":8.74,"MassiveIntentClassification (es)":30.63,"MassiveIntentClassification (fa)":3.16,"MassiveIntentClassification (fi)":30.4,"MassiveIntentClassification (fr)":30.84,"MassiveIntentClassification (he)":2.06,"MassiveIntentClassification (hi)":2.43,"MassiveIntentClassification (hu)":24.87,"MassiveIntentClassification (hy)":2.67,"MassiveIntentClassification (id)":32.7,"MassiveIntentClassification (is)":24.13,"MassiveIntentClassification (it)":34.58,"MassiveIntentClassification (ja)":5.99,"MassiveIntentClassification (jv)":27.6,"MassiveIntentClassification (ka)":2.14,"MassiveIntentClassification (km)":4.38,"MassiveIntentClassification (kn)":2.1,"MassiveIntentClassification (ko)":2.36,"MassiveIntentClassification (lv)":22.06,"MassiveIntentClassification (ml)":2.29,"MassiveIntentClassification (mn)":28.51,"MassiveIntentClassification (ms)":28.16,"MassiveIntentClassification (my)":3.97,"MassiveIntentClassification (nl)":30.51,"MassiveIntentClassification (pt)":33.85,"MassiveIntentClassification 
(ro)":30.47,"MassiveIntentClassification (ru)":58.06,"MassiveIntentClassification (sl)":29.64,"MassiveIntentClassification (sq)":31.7,"MassiveIntentClassification (sw)":27.52,"MassiveIntentClassification (ta)":1.38,"MassiveIntentClassification (te)":2.04,"MassiveIntentClassification (th)":3.79,"MassiveIntentClassification (tl)":31.44,"MassiveIntentClassification (tr)":26.22,"MassiveIntentClassification (ur)":2.55,"MassiveIntentClassification (vi)":23.1,"MassiveIntentClassification (zh-TW)":6.3,"MassiveScenarioClassification (af)":39.37,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":11.36,"MassiveScenarioClassification (az)":29.62,"MassiveScenarioClassification (bn)":8.79,"MassiveScenarioClassification (cy)":38.93,"MassiveScenarioClassification (de)":40.66,"MassiveScenarioClassification (el)":16.44,"MassiveScenarioClassification (es)":36.28,"MassiveScenarioClassification (fa)":6.8,"MassiveScenarioClassification (fi)":34.5,"MassiveScenarioClassification (fr)":42.42,"MassiveScenarioClassification (he)":7.95,"MassiveScenarioClassification (hi)":7.51,"MassiveScenarioClassification (hu)":35.04,"MassiveScenarioClassification (hy)":8.53,"MassiveScenarioClassification (id)":39.6,"MassiveScenarioClassification (is)":32.61,"MassiveScenarioClassification (it)":41.2,"MassiveScenarioClassification (ja)":11.21,"MassiveScenarioClassification (jv)":36.25,"MassiveScenarioClassification (ka)":6.59,"MassiveScenarioClassification (km)":8.15,"MassiveScenarioClassification (kn)":8.05,"MassiveScenarioClassification (ko)":5.62,"MassiveScenarioClassification (lv)":28.47,"MassiveScenarioClassification (ml)":7.35,"MassiveScenarioClassification (mn)":33.48,"MassiveScenarioClassification (ms)":38.85,"MassiveScenarioClassification (my)":11.23,"MassiveScenarioClassification (nl)":38.92,"MassiveScenarioClassification (pt)":40.23,"MassiveScenarioClassification (ro)":39.78,"MassiveScenarioClassification (ru)":64.15,"MassiveScenarioClassification (sl)":35.34,"MassiveScenarioClassification (sq)":42.07,"MassiveScenarioClassification (sw)":35.33,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.86,"MassiveScenarioClassification (th)":8.25,"MassiveScenarioClassification (tl)":38.17,"MassiveScenarioClassification (tr)":33.85,"MassiveScenarioClassification (ur)":8.74,"MassiveScenarioClassification (vi)":31.94,"MassiveScenarioClassification (zh-TW)":11.68} -{"index":111,"Rank":113,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.68,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification 
(fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":112,"Rank":114,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification 
(ja)":"","AmazonReviewsClassification (zh)":34.12,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":114,"Rank":115,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.46,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification 
(id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":115,"Rank":116,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification 
(ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":116,"Rank":117,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.25,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":71.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":44.53,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.93,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification 
(km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":58.31,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":117,"Rank":118,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":45.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification 
(ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":118,"Rank":119,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.23,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":119,"Rank":120,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.69,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":120,"Rank":121,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":21.96,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification 
(sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":121,"Rank":122,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":33.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.5,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.98,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.19,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification 
(hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.22,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":122,"Rank":123,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.19,"MTOPDomainClassification 
(hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.64,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.8,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":73.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification 
(th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":123,"Rank":124,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.75,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":43.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":19.38,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":13.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.21,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification 
(jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":124,"Rank":125,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":36.77,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.37,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.82,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification 
(am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":125,"Rank":126,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.15,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification 
(ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.94,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":126,"Rank":127,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.76,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.38,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":64.45,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification 
(de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.42,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.11,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"index":127,"Rank":128,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification 
(de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification 
(ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":1,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.56,"AmazonCounterfactualClassification (de)":73.17,"AmazonCounterfactualClassification (ja)":76.42,"AmazonReviewsClassification (de)":39.92,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":38.52,"AmazonReviewsClassification (ja)":36.44,"AmazonReviewsClassification (zh)":36.45,"MTOPDomainClassification (de)":86.95,"MTOPDomainClassification (es)":84.07,"MTOPDomainClassification (fr)":84.14,"MTOPDomainClassification (hi)":85.11,"MTOPDomainClassification (th)":81.24,"MTOPIntentClassification (de)":63.42,"MTOPIntentClassification (es)":64.44,"MTOPIntentClassification (fr)":62.01,"MTOPIntentClassification (hi)":62.58,"MTOPIntentClassification (th)":64.61,"MassiveIntentClassification (af)":56.12,"MassiveIntentClassification (am)":55.71,"MassiveIntentClassification (ar)":50.86,"MassiveIntentClassification (az)":58.97,"MassiveIntentClassification (bn)":58.22,"MassiveIntentClassification (cy)":50.16,"MassiveIntentClassification (de)":56.21,"MassiveIntentClassification (el)":57.03,"MassiveIntentClassification (es)":58.32,"MassiveIntentClassification (fa)":62.33,"MassiveIntentClassification (fi)":60.12,"MassiveIntentClassification (fr)":60.47,"MassiveIntentClassification (he)":56.55,"MassiveIntentClassification (hi)":59.4,"MassiveIntentClassification (hu)":59.52,"MassiveIntentClassification (hy)":56.2,"MassiveIntentClassification (id)":61.12,"MassiveIntentClassification (is)":54.9,"MassiveIntentClassification (it)":59.83,"MassiveIntentClassification (ja)":63.11,"MassiveIntentClassification (jv)":50.98,"MassiveIntentClassification (ka)":48.35,"MassiveIntentClassification (km)":48.55,"MassiveIntentClassification (kn)":56.24,"MassiveIntentClassification (ko)":60.99,"MassiveIntentClassification (lv)":57.1,"MassiveIntentClassification (ml)":57.91,"MassiveIntentClassification (mn)":58.5,"MassiveIntentClassification (ms)":58.6,"MassiveIntentClassification (my)":57.35,"MassiveIntentClassification (nl)":59.37,"MassiveIntentClassification (pt)":60.16,"MassiveIntentClassification (ro)":57.92,"MassiveIntentClassification (ru)":60.67,"MassiveIntentClassification (sl)":59.37,"MassiveIntentClassification (sq)":58.03,"MassiveIntentClassification (sw)":51.62,"MassiveIntentClassification (ta)":55.04,"MassiveIntentClassification (te)":58.32,"MassiveIntentClassification (th)":56.58,"MassiveIntentClassification (tl)":55.28,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":56.7,"MassiveIntentClassification (vi)":56.67,"MassiveIntentClassification (zh-TW)":59.51,"MassiveScenarioClassification (af)":63.39,"MassiveScenarioClassification (am)":62.02,"MassiveScenarioClassification (ar)":57.72,"MassiveScenarioClassification (az)":63.48,"MassiveScenarioClassification (bn)":61.84,"MassiveScenarioClassification (cy)":56.13,"MassiveScenarioClassification (de)":62.39,"MassiveScenarioClassification (el)":64.58,"MassiveScenarioClassification (es)":63.61,"MassiveScenarioClassification 
(fa)":67.46,"MassiveScenarioClassification (fi)":64.58,"MassiveScenarioClassification (fr)":65.1,"MassiveScenarioClassification (he)":63.53,"MassiveScenarioClassification (hi)":64.4,"MassiveScenarioClassification (hu)":65.82,"MassiveScenarioClassification (hy)":61.25,"MassiveScenarioClassification (id)":65.84,"MassiveScenarioClassification (is)":61.94,"MassiveScenarioClassification (it)":64.09,"MassiveScenarioClassification (ja)":67.72,"MassiveScenarioClassification (jv)":58.29,"MassiveScenarioClassification (ka)":53.38,"MassiveScenarioClassification (km)":56.18,"MassiveScenarioClassification (kn)":61.74,"MassiveScenarioClassification (ko)":67.26,"MassiveScenarioClassification (lv)":61.87,"MassiveScenarioClassification (ml)":62.26,"MassiveScenarioClassification (mn)":62.6,"MassiveScenarioClassification (ms)":65.63,"MassiveScenarioClassification (my)":62.94,"MassiveScenarioClassification (nl)":65.16,"MassiveScenarioClassification (pt)":63.28,"MassiveScenarioClassification (ro)":62.41,"MassiveScenarioClassification (ru)":65.25,"MassiveScenarioClassification (sl)":64.25,"MassiveScenarioClassification (sq)":64.54,"MassiveScenarioClassification (sw)":58.36,"MassiveScenarioClassification (ta)":59.08,"MassiveScenarioClassification (te)":64.13,"MassiveScenarioClassification (th)":64.34,"MassiveScenarioClassification (tl)":60.23,"MassiveScenarioClassification (tr)":65.43,"MassiveScenarioClassification (ur)":61.52,"MassiveScenarioClassification (vi)":61.05,"MassiveScenarioClassification (zh-TW)":67.08} +{"Rank":2,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":44.78,"AmazonCounterfactualClassification (de)":68.14,"AmazonCounterfactualClassification (ja)":65.39,"AmazonReviewsClassification (de)":35.03,"AmazonReviewsClassification (es)":36.24,"AmazonReviewsClassification (fr)":35.7,"AmazonReviewsClassification (ja)":31.08,"AmazonReviewsClassification (zh)":33.89,"MTOPDomainClassification (de)":86.19,"MTOPDomainClassification (es)":87.75,"MTOPDomainClassification (fr)":84.61,"MTOPDomainClassification (hi)":76.41,"MTOPDomainClassification (th)":73.62,"MTOPIntentClassification (de)":59.21,"MTOPIntentClassification (es)":57.21,"MTOPIntentClassification (fr)":53.41,"MTOPIntentClassification (hi)":45.54,"MTOPIntentClassification (th)":47.73,"MassiveIntentClassification (af)":40.02,"MassiveIntentClassification (am)":2.35,"MassiveIntentClassification (ar)":43.14,"MassiveIntentClassification (az)":25.6,"MassiveIntentClassification (bn)":4.84,"MassiveIntentClassification (cy)":15.43,"MassiveIntentClassification (de)":51.57,"MassiveIntentClassification (el)":49.65,"MassiveIntentClassification (es)":56.57,"MassiveIntentClassification (fa)":55.36,"MassiveIntentClassification (fi)":45.72,"MassiveIntentClassification (fr)":57.02,"MassiveIntentClassification (he)":46.74,"MassiveIntentClassification (hi)":48.55,"MassiveIntentClassification (hu)":50.65,"MassiveIntentClassification (hy)":40.79,"MassiveIntentClassification (id)":56.0,"MassiveIntentClassification (is)":16.08,"MassiveIntentClassification (it)":57.65,"MassiveIntentClassification (ja)":55.33,"MassiveIntentClassification (jv)":28.16,"MassiveIntentClassification (ka)":29.41,"MassiveIntentClassification (km)":4.79,"MassiveIntentClassification (kn)":3.37,"MassiveIntentClassification (ko)":49.97,"MassiveIntentClassification (lv)":44.31,"MassiveIntentClassification (ml)":3.24,"MassiveIntentClassification (mn)":40.37,"MassiveIntentClassification 
(ms)":47.97,"MassiveIntentClassification (my)":38.48,"MassiveIntentClassification (nl)":58.29,"MassiveIntentClassification (pt)":58.63,"MassiveIntentClassification (ro)":50.63,"MassiveIntentClassification (ru)":57.96,"MassiveIntentClassification (sl)":50.66,"MassiveIntentClassification (sq)":50.25,"MassiveIntentClassification (sw)":19.29,"MassiveIntentClassification (ta)":3.79,"MassiveIntentClassification (te)":3.36,"MassiveIntentClassification (th)":45.28,"MassiveIntentClassification (tl)":28.44,"MassiveIntentClassification (tr)":50.47,"MassiveIntentClassification (ur)":46.03,"MassiveIntentClassification (vi)":45.25,"MassiveIntentClassification (zh-TW)":54.96,"MassiveScenarioClassification (af)":53.67,"MassiveScenarioClassification (am)":7.72,"MassiveScenarioClassification (ar)":52.19,"MassiveScenarioClassification (az)":34.75,"MassiveScenarioClassification (bn)":10.65,"MassiveScenarioClassification (cy)":21.24,"MassiveScenarioClassification (de)":61.4,"MassiveScenarioClassification (el)":60.68,"MassiveScenarioClassification (es)":64.61,"MassiveScenarioClassification (fa)":59.24,"MassiveScenarioClassification (fi)":54.66,"MassiveScenarioClassification (fr)":65.2,"MassiveScenarioClassification (he)":54.74,"MassiveScenarioClassification (hi)":55.99,"MassiveScenarioClassification (hu)":61.2,"MassiveScenarioClassification (hy)":49.63,"MassiveScenarioClassification (id)":65.25,"MassiveScenarioClassification (is)":22.6,"MassiveScenarioClassification (it)":64.63,"MassiveScenarioClassification (ja)":62.32,"MassiveScenarioClassification (jv)":35.77,"MassiveScenarioClassification (ka)":39.08,"MassiveScenarioClassification (km)":9.24,"MassiveScenarioClassification (kn)":8.28,"MassiveScenarioClassification (ko)":57.6,"MassiveScenarioClassification (lv)":51.72,"MassiveScenarioClassification (ml)":8.25,"MassiveScenarioClassification (mn)":47.21,"MassiveScenarioClassification (ms)":55.65,"MassiveScenarioClassification (my)":43.31,"MassiveScenarioClassification (nl)":67.49,"MassiveScenarioClassification (pt)":64.26,"MassiveScenarioClassification (ro)":58.03,"MassiveScenarioClassification (ru)":65.41,"MassiveScenarioClassification (sl)":59.36,"MassiveScenarioClassification (sq)":62.69,"MassiveScenarioClassification (sw)":25.12,"MassiveScenarioClassification (ta)":8.67,"MassiveScenarioClassification (te)":7.82,"MassiveScenarioClassification (th)":54.65,"MassiveScenarioClassification (tl)":36.09,"MassiveScenarioClassification (tr)":60.89,"MassiveScenarioClassification (ur)":54.71,"MassiveScenarioClassification (vi)":55.15,"MassiveScenarioClassification (zh-TW)":62.89} +{"Rank":3,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":42.45,"AmazonCounterfactualClassification (de)":67.82,"AmazonCounterfactualClassification (ja)":68.76,"AmazonReviewsClassification (de)":31.07,"AmazonReviewsClassification (es)":32.72,"AmazonReviewsClassification (fr)":31.12,"AmazonReviewsClassification (ja)":28.94,"AmazonReviewsClassification (zh)":30.89,"MTOPDomainClassification (de)":74.08,"MTOPDomainClassification (es)":73.47,"MTOPDomainClassification (fr)":72.26,"MTOPDomainClassification (hi)":72.95,"MTOPDomainClassification (th)":72.68,"MTOPIntentClassification (de)":51.62,"MTOPIntentClassification (es)":52.75,"MTOPIntentClassification (fr)":50.12,"MTOPIntentClassification (hi)":45.55,"MTOPIntentClassification (th)":50.07,"MassiveIntentClassification (af)":38.01,"MassiveIntentClassification (am)":12.7,"MassiveIntentClassification (ar)":37.16,"MassiveIntentClassification 
(az)":19.98,"MassiveIntentClassification (bn)":42.51,"MassiveIntentClassification (cy)":17.33,"MassiveIntentClassification (de)":44.79,"MassiveIntentClassification (el)":46.71,"MassiveIntentClassification (es)":45.44,"MassiveIntentClassification (fa)":45.01,"MassiveIntentClassification (fi)":45.94,"MassiveIntentClassification (fr)":46.13,"MassiveIntentClassification (he)":42.55,"MassiveIntentClassification (hi)":40.2,"MassiveIntentClassification (hu)":42.77,"MassiveIntentClassification (hy)":28.07,"MassiveIntentClassification (id)":45.81,"MassiveIntentClassification (is)":39.86,"MassiveIntentClassification (it)":48.25,"MassiveIntentClassification (ja)":45.3,"MassiveIntentClassification (jv)":24.3,"MassiveIntentClassification (ka)":22.7,"MassiveIntentClassification (km)":22.48,"MassiveIntentClassification (kn)":4.32,"MassiveIntentClassification (ko)":44.26,"MassiveIntentClassification (lv)":39.75,"MassiveIntentClassification (ml)":41.33,"MassiveIntentClassification (mn)":16.2,"MassiveIntentClassification (ms)":43.23,"MassiveIntentClassification (my)":25.37,"MassiveIntentClassification (nl)":45.0,"MassiveIntentClassification (pt)":48.55,"MassiveIntentClassification (ro)":44.3,"MassiveIntentClassification (ru)":44.29,"MassiveIntentClassification (sl)":44.72,"MassiveIntentClassification (sq)":46.12,"MassiveIntentClassification (sw)":31.89,"MassiveIntentClassification (ta)":29.63,"MassiveIntentClassification (te)":36.03,"MassiveIntentClassification (th)":43.39,"MassiveIntentClassification (tl)":29.73,"MassiveIntentClassification (tr)":43.93,"MassiveIntentClassification (ur)":26.11,"MassiveIntentClassification (vi)":44.33,"MassiveIntentClassification (zh-TW)":32.93,"MassiveScenarioClassification (af)":47.1,"MassiveScenarioClassification (am)":17.7,"MassiveScenarioClassification (ar)":45.21,"MassiveScenarioClassification (az)":28.21,"MassiveScenarioClassification (bn)":50.52,"MassiveScenarioClassification (cy)":22.58,"MassiveScenarioClassification (de)":54.34,"MassiveScenarioClassification (el)":55.47,"MassiveScenarioClassification (es)":52.77,"MassiveScenarioClassification (fa)":52.5,"MassiveScenarioClassification (fi)":52.63,"MassiveScenarioClassification (fr)":54.32,"MassiveScenarioClassification (he)":52.41,"MassiveScenarioClassification (hi)":47.37,"MassiveScenarioClassification (hu)":53.43,"MassiveScenarioClassification (hy)":33.57,"MassiveScenarioClassification (id)":54.38,"MassiveScenarioClassification (is)":49.78,"MassiveScenarioClassification (it)":54.84,"MassiveScenarioClassification (ja)":54.12,"MassiveScenarioClassification (jv)":32.71,"MassiveScenarioClassification (ka)":26.92,"MassiveScenarioClassification (km)":27.23,"MassiveScenarioClassification (kn)":10.06,"MassiveScenarioClassification (ko)":52.01,"MassiveScenarioClassification (lv)":44.82,"MassiveScenarioClassification (ml)":49.1,"MassiveScenarioClassification (mn)":21.51,"MassiveScenarioClassification (ms)":53.6,"MassiveScenarioClassification (my)":29.72,"MassiveScenarioClassification (nl)":53.33,"MassiveScenarioClassification (pt)":53.41,"MassiveScenarioClassification (ro)":50.48,"MassiveScenarioClassification (ru)":51.84,"MassiveScenarioClassification (sl)":51.29,"MassiveScenarioClassification (sq)":55.65,"MassiveScenarioClassification (sw)":42.04,"MassiveScenarioClassification (ta)":36.72,"MassiveScenarioClassification (te)":42.08,"MassiveScenarioClassification (th)":52.15,"MassiveScenarioClassification (tl)":37.34,"MassiveScenarioClassification (tr)":52.56,"MassiveScenarioClassification 
(ur)":32.6,"MassiveScenarioClassification (vi)":50.97,"MassiveScenarioClassification (zh-TW)":42.32} +{"Rank":4,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":31.93,"AmazonCounterfactualClassification (de)":57.1,"AmazonCounterfactualClassification (ja)":59.91,"AmazonReviewsClassification (de)":25.91,"AmazonReviewsClassification (es)":27.63,"AmazonReviewsClassification (fr)":27.54,"AmazonReviewsClassification (ja)":23.57,"AmazonReviewsClassification (zh)":22.99,"MTOPDomainClassification (de)":72.04,"MTOPDomainClassification (es)":72.99,"MTOPDomainClassification (fr)":75.59,"MTOPDomainClassification (hi)":40.36,"MTOPDomainClassification (th)":17.1,"MTOPIntentClassification (de)":43.41,"MTOPIntentClassification (es)":41.88,"MTOPIntentClassification (fr)":38.94,"MTOPIntentClassification (hi)":17.75,"MTOPIntentClassification (th)":5.63,"MassiveIntentClassification (af)":38.94,"MassiveIntentClassification (am)":2.45,"MassiveIntentClassification (ar)":20.94,"MassiveIntentClassification (az)":34.25,"MassiveIntentClassification (bn)":13.67,"MassiveIntentClassification (cy)":35.71,"MassiveIntentClassification (de)":44.17,"MassiveIntentClassification (el)":28.7,"MassiveIntentClassification (es)":40.91,"MassiveIntentClassification (fa)":23.52,"MassiveIntentClassification (fi)":39.27,"MassiveIntentClassification (fr)":44.82,"MassiveIntentClassification (he)":23.65,"MassiveIntentClassification (hi)":17.98,"MassiveIntentClassification (hu)":38.0,"MassiveIntentClassification (hy)":8.69,"MassiveIntentClassification (id)":39.66,"MassiveIntentClassification (is)":35.14,"MassiveIntentClassification (it)":43.17,"MassiveIntentClassification (ja)":30.94,"MassiveIntentClassification (jv)":36.69,"MassiveIntentClassification (ka)":9.17,"MassiveIntentClassification (km)":4.99,"MassiveIntentClassification (kn)":3.08,"MassiveIntentClassification (ko)":19.97,"MassiveIntentClassification (lv)":38.61,"MassiveIntentClassification (ml)":2.85,"MassiveIntentClassification (mn)":23.25,"MassiveIntentClassification (ms)":36.21,"MassiveIntentClassification (my)":4.38,"MassiveIntentClassification (nl)":41.85,"MassiveIntentClassification (pt)":45.12,"MassiveIntentClassification (ro)":41.71,"MassiveIntentClassification (ru)":26.33,"MassiveIntentClassification (sl)":38.52,"MassiveIntentClassification (sq)":41.62,"MassiveIntentClassification (sw)":35.28,"MassiveIntentClassification (ta)":13.1,"MassiveIntentClassification (te)":2.56,"MassiveIntentClassification (th)":10.54,"MassiveIntentClassification (tl)":38.56,"MassiveIntentClassification (tr)":35.9,"MassiveIntentClassification (ur)":16.18,"MassiveIntentClassification (vi)":37.38,"MassiveIntentClassification (zh-TW)":22.39,"MassiveScenarioClassification (af)":45.71,"MassiveScenarioClassification (am)":7.41,"MassiveScenarioClassification (ar)":27.62,"MassiveScenarioClassification (az)":39.58,"MassiveScenarioClassification (bn)":18.98,"MassiveScenarioClassification (cy)":41.4,"MassiveScenarioClassification (de)":52.07,"MassiveScenarioClassification (el)":35.51,"MassiveScenarioClassification (es)":50.74,"MassiveScenarioClassification (fa)":29.0,"MassiveScenarioClassification (fi)":45.8,"MassiveScenarioClassification (fr)":53.76,"MassiveScenarioClassification (he)":25.68,"MassiveScenarioClassification (hi)":23.02,"MassiveScenarioClassification (hu)":44.09,"MassiveScenarioClassification (hy)":14.83,"MassiveScenarioClassification (id)":44.35,"MassiveScenarioClassification (is)":43.08,"MassiveScenarioClassification 
(it)":51.71,"MassiveScenarioClassification (ja)":36.75,"MassiveScenarioClassification (jv)":44.57,"MassiveScenarioClassification (ka)":14.84,"MassiveScenarioClassification (km)":9.75,"MassiveScenarioClassification (kn)":8.32,"MassiveScenarioClassification (ko)":25.72,"MassiveScenarioClassification (lv)":42.75,"MassiveScenarioClassification (ml)":7.25,"MassiveScenarioClassification (mn)":29.03,"MassiveScenarioClassification (ms)":44.65,"MassiveScenarioClassification (my)":10.07,"MassiveScenarioClassification (nl)":49.15,"MassiveScenarioClassification (pt)":53.0,"MassiveScenarioClassification (ro)":49.97,"MassiveScenarioClassification (ru)":28.75,"MassiveScenarioClassification (sl)":42.26,"MassiveScenarioClassification (sq)":49.14,"MassiveScenarioClassification (sw)":43.18,"MassiveScenarioClassification (ta)":19.38,"MassiveScenarioClassification (te)":7.74,"MassiveScenarioClassification (th)":18.32,"MassiveScenarioClassification (tl)":48.31,"MassiveScenarioClassification (tr)":41.79,"MassiveScenarioClassification (ur)":24.46,"MassiveScenarioClassification (vi)":40.94,"MassiveScenarioClassification (zh-TW)":31.16} +{"Rank":5,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":30.51,"AmazonCounterfactualClassification (de)":59.79,"AmazonCounterfactualClassification (ja)":50.59,"AmazonReviewsClassification (de)":35.06,"AmazonReviewsClassification (es)":37.18,"AmazonReviewsClassification (fr)":35.48,"AmazonReviewsClassification (ja)":22.24,"AmazonReviewsClassification (zh)":21.89,"MTOPDomainClassification (de)":85.42,"MTOPDomainClassification (es)":88.2,"MTOPDomainClassification (fr)":85.05,"MTOPDomainClassification (hi)":21.74,"MTOPDomainClassification (th)":15.87,"MTOPIntentClassification (de)":55.75,"MTOPIntentClassification (es)":57.73,"MTOPIntentClassification (fr)":51.07,"MTOPIntentClassification (hi)":3.19,"MTOPIntentClassification (th)":5.55,"MassiveIntentClassification (af)":42.6,"MassiveIntentClassification (am)":2.12,"MassiveIntentClassification (ar)":4.64,"MassiveIntentClassification (az)":35.05,"MassiveIntentClassification (bn)":2.84,"MassiveIntentClassification (cy)":36.19,"MassiveIntentClassification (de)":55.49,"MassiveIntentClassification (el)":10.14,"MassiveIntentClassification (es)":56.72,"MassiveIntentClassification (fa)":3.54,"MassiveIntentClassification (fi)":37.13,"MassiveIntentClassification (fr)":57.67,"MassiveIntentClassification (he)":2.56,"MassiveIntentClassification (hi)":3.24,"MassiveIntentClassification (hu)":34.22,"MassiveIntentClassification (hy)":3.01,"MassiveIntentClassification (id)":46.54,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":54.13,"MassiveIntentClassification (ja)":4.27,"MassiveIntentClassification (jv)":36.97,"MassiveIntentClassification (ka)":2.72,"MassiveIntentClassification (km)":5.35,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":2.64,"MassiveIntentClassification (lv)":36.32,"MassiveIntentClassification (ml)":3.18,"MassiveIntentClassification (mn)":22.85,"MassiveIntentClassification (ms)":42.87,"MassiveIntentClassification (my)":4.04,"MassiveIntentClassification (nl)":49.53,"MassiveIntentClassification (pt)":57.03,"MassiveIntentClassification (ro)":49.95,"MassiveIntentClassification (ru)":36.58,"MassiveIntentClassification (sl)":39.44,"MassiveIntentClassification (sq)":41.78,"MassiveIntentClassification (sw)":35.85,"MassiveIntentClassification (ta)":2.32,"MassiveIntentClassification (te)":2.2,"MassiveIntentClassification 
(th)":3.74,"MassiveIntentClassification (tl)":43.12,"MassiveIntentClassification (tr)":35.24,"MassiveIntentClassification (ur)":3.0,"MassiveIntentClassification (vi)":30.01,"MassiveIntentClassification (zh-TW)":3.35,"MassiveScenarioClassification (af)":52.54,"MassiveScenarioClassification (am)":6.3,"MassiveScenarioClassification (ar)":11.96,"MassiveScenarioClassification (az)":40.17,"MassiveScenarioClassification (bn)":8.29,"MassiveScenarioClassification (cy)":42.24,"MassiveScenarioClassification (de)":68.09,"MassiveScenarioClassification (el)":16.66,"MassiveScenarioClassification (es)":64.32,"MassiveScenarioClassification (fa)":6.9,"MassiveScenarioClassification (fi)":43.96,"MassiveScenarioClassification (fr)":66.72,"MassiveScenarioClassification (he)":7.51,"MassiveScenarioClassification (hi)":7.82,"MassiveScenarioClassification (hu)":42.16,"MassiveScenarioClassification (hy)":9.33,"MassiveScenarioClassification (id)":53.54,"MassiveScenarioClassification (is)":42.84,"MassiveScenarioClassification (it)":62.44,"MassiveScenarioClassification (ja)":7.29,"MassiveScenarioClassification (jv)":43.13,"MassiveScenarioClassification (ka)":7.63,"MassiveScenarioClassification (km)":9.08,"MassiveScenarioClassification (kn)":8.1,"MassiveScenarioClassification (ko)":6.35,"MassiveScenarioClassification (lv)":40.24,"MassiveScenarioClassification (ml)":7.65,"MassiveScenarioClassification (mn)":27.98,"MassiveScenarioClassification (ms)":52.41,"MassiveScenarioClassification (my)":9.21,"MassiveScenarioClassification (nl)":60.35,"MassiveScenarioClassification (pt)":62.78,"MassiveScenarioClassification (ro)":59.62,"MassiveScenarioClassification (ru)":43.44,"MassiveScenarioClassification (sl)":44.79,"MassiveScenarioClassification (sq)":50.84,"MassiveScenarioClassification (sw)":44.63,"MassiveScenarioClassification (ta)":7.95,"MassiveScenarioClassification (te)":7.5,"MassiveScenarioClassification (th)":8.79,"MassiveScenarioClassification (tl)":53.54,"MassiveScenarioClassification (tr)":42.47,"MassiveScenarioClassification (ur)":9.58,"MassiveScenarioClassification (vi)":34.68,"MassiveScenarioClassification (zh-TW)":8.77} +{"Rank":6,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":29.6,"AmazonCounterfactualClassification (de)":67.01,"AmazonCounterfactualClassification (ja)":45.61,"AmazonReviewsClassification (de)":44.05,"AmazonReviewsClassification (es)":45.01,"AmazonReviewsClassification (fr)":43.52,"AmazonReviewsClassification (ja)":22.23,"AmazonReviewsClassification (zh)":21.88,"MTOPDomainClassification (de)":83.28,"MTOPDomainClassification (es)":85.32,"MTOPDomainClassification (fr)":85.14,"MTOPDomainClassification (hi)":20.85,"MTOPDomainClassification (th)":15.62,"MTOPIntentClassification (de)":54.65,"MTOPIntentClassification (es)":57.38,"MTOPIntentClassification (fr)":54.39,"MTOPIntentClassification (hi)":3.28,"MTOPIntentClassification (th)":5.08,"MassiveIntentClassification (af)":40.17,"MassiveIntentClassification (am)":2.18,"MassiveIntentClassification (ar)":4.18,"MassiveIntentClassification (az)":30.02,"MassiveIntentClassification (bn)":2.6,"MassiveIntentClassification (cy)":29.15,"MassiveIntentClassification (de)":57.43,"MassiveIntentClassification (el)":9.96,"MassiveIntentClassification (es)":57.97,"MassiveIntentClassification (fa)":3.6,"MassiveIntentClassification (fi)":34.02,"MassiveIntentClassification (fr)":60.99,"MassiveIntentClassification (he)":2.51,"MassiveIntentClassification (hi)":3.02,"MassiveIntentClassification 
(hu)":31.66,"MassiveIntentClassification (hy)":3.32,"MassiveIntentClassification (id)":41.53,"MassiveIntentClassification (is)":30.25,"MassiveIntentClassification (it)":56.57,"MassiveIntentClassification (ja)":3.5,"MassiveIntentClassification (jv)":31.67,"MassiveIntentClassification (ka)":2.79,"MassiveIntentClassification (km)":5.43,"MassiveIntentClassification (kn)":2.79,"MassiveIntentClassification (ko)":2.67,"MassiveIntentClassification (lv)":34.25,"MassiveIntentClassification (ml)":2.98,"MassiveIntentClassification (mn)":20.99,"MassiveIntentClassification (ms)":37.43,"MassiveIntentClassification (my)":4.02,"MassiveIntentClassification (nl)":50.51,"MassiveIntentClassification (pt)":57.95,"MassiveIntentClassification (ro)":49.37,"MassiveIntentClassification (ru)":33.46,"MassiveIntentClassification (sl)":36.33,"MassiveIntentClassification (sq)":37.65,"MassiveIntentClassification (sw)":30.6,"MassiveIntentClassification (ta)":1.79,"MassiveIntentClassification (te)":2.26,"MassiveIntentClassification (th)":4.02,"MassiveIntentClassification (tl)":38.92,"MassiveIntentClassification (tr)":32.05,"MassiveIntentClassification (ur)":2.7,"MassiveIntentClassification (vi)":21.47,"MassiveIntentClassification (zh-TW)":3.24,"MassiveScenarioClassification (af)":50.81,"MassiveScenarioClassification (am)":6.95,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.79,"MassiveScenarioClassification (bn)":8.0,"MassiveScenarioClassification (cy)":33.91,"MassiveScenarioClassification (de)":65.33,"MassiveScenarioClassification (el)":16.89,"MassiveScenarioClassification (es)":62.52,"MassiveScenarioClassification (fa)":6.08,"MassiveScenarioClassification (fi)":43.34,"MassiveScenarioClassification (fr)":66.42,"MassiveScenarioClassification (he)":7.55,"MassiveScenarioClassification (hi)":7.44,"MassiveScenarioClassification (hu)":40.85,"MassiveScenarioClassification (hy)":9.25,"MassiveScenarioClassification (id)":51.92,"MassiveScenarioClassification (is)":40.09,"MassiveScenarioClassification (it)":62.94,"MassiveScenarioClassification (ja)":7.9,"MassiveScenarioClassification (jv)":41.33,"MassiveScenarioClassification (ka)":7.76,"MassiveScenarioClassification (km)":9.19,"MassiveScenarioClassification (kn)":8.36,"MassiveScenarioClassification (ko)":6.13,"MassiveScenarioClassification (lv)":40.7,"MassiveScenarioClassification (ml)":6.98,"MassiveScenarioClassification (mn)":27.0,"MassiveScenarioClassification (ms)":46.9,"MassiveScenarioClassification (my)":9.55,"MassiveScenarioClassification (nl)":59.65,"MassiveScenarioClassification (pt)":62.18,"MassiveScenarioClassification (ro)":58.22,"MassiveScenarioClassification (ru)":40.73,"MassiveScenarioClassification (sl)":43.66,"MassiveScenarioClassification (sq)":49.25,"MassiveScenarioClassification (sw)":40.55,"MassiveScenarioClassification (ta)":7.46,"MassiveScenarioClassification (te)":7.03,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":51.74,"MassiveScenarioClassification (tr)":43.01,"MassiveScenarioClassification (ur)":9.61,"MassiveScenarioClassification (vi)":28.91,"MassiveScenarioClassification (zh-TW)":7.14} +{"Rank":7,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.9,"AmazonCounterfactualClassification (de)":59.38,"AmazonCounterfactualClassification (ja)":45.87,"AmazonReviewsClassification (de)":33.06,"AmazonReviewsClassification (es)":34.0,"AmazonReviewsClassification (fr)":33.48,"AmazonReviewsClassification 
(ja)":21.78,"AmazonReviewsClassification (zh)":21.83,"MTOPDomainClassification (de)":81.91,"MTOPDomainClassification (es)":84.7,"MTOPDomainClassification (fr)":82.48,"MTOPDomainClassification (hi)":22.11,"MTOPDomainClassification (th)":16.36,"MTOPIntentClassification (de)":52.13,"MTOPIntentClassification (es)":52.62,"MTOPIntentClassification (fr)":46.39,"MTOPIntentClassification (hi)":3.9,"MTOPIntentClassification (th)":5.38,"MassiveIntentClassification (af)":41.02,"MassiveIntentClassification (am)":2.34,"MassiveIntentClassification (ar)":4.87,"MassiveIntentClassification (az)":34.92,"MassiveIntentClassification (bn)":2.52,"MassiveIntentClassification (cy)":35.87,"MassiveIntentClassification (de)":51.48,"MassiveIntentClassification (el)":10.0,"MassiveIntentClassification (es)":53.3,"MassiveIntentClassification (fa)":3.59,"MassiveIntentClassification (fi)":37.35,"MassiveIntentClassification (fr)":54.83,"MassiveIntentClassification (he)":2.52,"MassiveIntentClassification (hi)":2.88,"MassiveIntentClassification (hu)":33.52,"MassiveIntentClassification (hy)":3.13,"MassiveIntentClassification (id)":40.11,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":51.21,"MassiveIntentClassification (ja)":4.75,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.48,"MassiveIntentClassification (kn)":2.44,"MassiveIntentClassification (ko)":2.59,"MassiveIntentClassification (lv)":38.15,"MassiveIntentClassification (ml)":2.67,"MassiveIntentClassification (mn)":18.47,"MassiveIntentClassification (ms)":35.58,"MassiveIntentClassification (my)":4.35,"MassiveIntentClassification (nl)":45.96,"MassiveIntentClassification (pt)":52.27,"MassiveIntentClassification (ro)":46.39,"MassiveIntentClassification (ru)":16.82,"MassiveIntentClassification (sl)":37.3,"MassiveIntentClassification (sq)":41.73,"MassiveIntentClassification (sw)":35.97,"MassiveIntentClassification (ta)":1.52,"MassiveIntentClassification (te)":2.57,"MassiveIntentClassification (th)":3.94,"MassiveIntentClassification (tl)":41.03,"MassiveIntentClassification (tr)":33.75,"MassiveIntentClassification (ur)":2.57,"MassiveIntentClassification (vi)":25.23,"MassiveIntentClassification (zh-TW)":4.64,"MassiveScenarioClassification (af)":51.48,"MassiveScenarioClassification (am)":7.74,"MassiveScenarioClassification (ar)":12.03,"MassiveScenarioClassification (az)":41.77,"MassiveScenarioClassification (bn)":8.07,"MassiveScenarioClassification (cy)":43.67,"MassiveScenarioClassification (de)":63.63,"MassiveScenarioClassification (el)":16.83,"MassiveScenarioClassification (es)":61.48,"MassiveScenarioClassification (fa)":6.48,"MassiveScenarioClassification (fi)":43.54,"MassiveScenarioClassification (fr)":64.06,"MassiveScenarioClassification (he)":8.03,"MassiveScenarioClassification (hi)":7.5,"MassiveScenarioClassification (hu)":42.59,"MassiveScenarioClassification (hy)":9.22,"MassiveScenarioClassification (id)":48.67,"MassiveScenarioClassification (is)":43.87,"MassiveScenarioClassification (it)":59.83,"MassiveScenarioClassification (ja)":5.62,"MassiveScenarioClassification (jv)":42.18,"MassiveScenarioClassification (ka)":7.52,"MassiveScenarioClassification (km)":9.55,"MassiveScenarioClassification (kn)":8.34,"MassiveScenarioClassification (ko)":6.11,"MassiveScenarioClassification (lv)":43.35,"MassiveScenarioClassification (ml)":7.28,"MassiveScenarioClassification (mn)":23.94,"MassiveScenarioClassification (ms)":45.18,"MassiveScenarioClassification 
(my)":9.33,"MassiveScenarioClassification (nl)":57.02,"MassiveScenarioClassification (pt)":59.45,"MassiveScenarioClassification (ro)":56.8,"MassiveScenarioClassification (ru)":25.85,"MassiveScenarioClassification (sl)":42.51,"MassiveScenarioClassification (sq)":50.41,"MassiveScenarioClassification (sw)":43.02,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.9,"MassiveScenarioClassification (th)":8.7,"MassiveScenarioClassification (tl)":51.76,"MassiveScenarioClassification (tr)":42.54,"MassiveScenarioClassification (ur)":9.32,"MassiveScenarioClassification (vi)":31.51,"MassiveScenarioClassification (zh-TW)":8.16} +{"Rank":8,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.26,"AmazonCounterfactualClassification (de)":67.97,"AmazonCounterfactualClassification (ja)":45.72,"AmazonReviewsClassification (de)":43.16,"AmazonReviewsClassification (es)":42.89,"AmazonReviewsClassification (fr)":41.48,"AmazonReviewsClassification (ja)":22.49,"AmazonReviewsClassification (zh)":22.12,"MTOPDomainClassification (de)":80.56,"MTOPDomainClassification (es)":80.78,"MTOPDomainClassification (fr)":79.6,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":15.82,"MTOPIntentClassification (de)":52.5,"MTOPIntentClassification (es)":52.07,"MTOPIntentClassification (fr)":47.73,"MTOPIntentClassification (hi)":3.74,"MTOPIntentClassification (th)":4.96,"MassiveIntentClassification (af)":38.41,"MassiveIntentClassification (am)":2.49,"MassiveIntentClassification (ar)":4.7,"MassiveIntentClassification (az)":31.77,"MassiveIntentClassification (bn)":2.77,"MassiveIntentClassification (cy)":31.69,"MassiveIntentClassification (de)":52.01,"MassiveIntentClassification (el)":9.74,"MassiveIntentClassification (es)":54.1,"MassiveIntentClassification (fa)":3.86,"MassiveIntentClassification (fi)":34.07,"MassiveIntentClassification (fr)":57.01,"MassiveIntentClassification (he)":2.14,"MassiveIntentClassification (hi)":2.97,"MassiveIntentClassification (hu)":32.01,"MassiveIntentClassification (hy)":3.17,"MassiveIntentClassification (id)":34.55,"MassiveIntentClassification (is)":32.0,"MassiveIntentClassification (it)":52.94,"MassiveIntentClassification (ja)":2.9,"MassiveIntentClassification (jv)":32.42,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.5,"MassiveIntentClassification (kn)":2.41,"MassiveIntentClassification (ko)":2.57,"MassiveIntentClassification (lv)":35.09,"MassiveIntentClassification (ml)":2.95,"MassiveIntentClassification (mn)":18.33,"MassiveIntentClassification (ms)":29.69,"MassiveIntentClassification (my)":3.99,"MassiveIntentClassification (nl)":44.95,"MassiveIntentClassification (pt)":51.96,"MassiveIntentClassification (ro)":43.83,"MassiveIntentClassification (ru)":17.32,"MassiveIntentClassification (sl)":33.71,"MassiveIntentClassification (sq)":37.62,"MassiveIntentClassification (sw)":31.9,"MassiveIntentClassification (ta)":1.91,"MassiveIntentClassification (te)":2.54,"MassiveIntentClassification (th)":3.85,"MassiveIntentClassification (tl)":36.83,"MassiveIntentClassification (tr)":33.0,"MassiveIntentClassification (ur)":2.62,"MassiveIntentClassification (vi)":22.81,"MassiveIntentClassification (zh-TW)":3.49,"MassiveScenarioClassification (af)":50.28,"MassiveScenarioClassification (am)":7.15,"MassiveScenarioClassification (ar)":12.12,"MassiveScenarioClassification (az)":39.68,"MassiveScenarioClassification (bn)":8.06,"MassiveScenarioClassification 
(cy)":38.01,"MassiveScenarioClassification (de)":62.71,"MassiveScenarioClassification (el)":17.19,"MassiveScenarioClassification (es)":59.56,"MassiveScenarioClassification (fa)":6.5,"MassiveScenarioClassification (fi)":41.72,"MassiveScenarioClassification (fr)":63.6,"MassiveScenarioClassification (he)":7.93,"MassiveScenarioClassification (hi)":7.85,"MassiveScenarioClassification (hu)":41.37,"MassiveScenarioClassification (hy)":9.42,"MassiveScenarioClassification (id)":44.88,"MassiveScenarioClassification (is)":40.86,"MassiveScenarioClassification (it)":60.09,"MassiveScenarioClassification (ja)":6.56,"MassiveScenarioClassification (jv)":40.18,"MassiveScenarioClassification (ka)":7.37,"MassiveScenarioClassification (km)":9.56,"MassiveScenarioClassification (kn)":8.4,"MassiveScenarioClassification (ko)":5.96,"MassiveScenarioClassification (lv)":41.44,"MassiveScenarioClassification (ml)":7.47,"MassiveScenarioClassification (mn)":25.36,"MassiveScenarioClassification (ms)":39.69,"MassiveScenarioClassification (my)":9.68,"MassiveScenarioClassification (nl)":56.09,"MassiveScenarioClassification (pt)":57.99,"MassiveScenarioClassification (ro)":56.0,"MassiveScenarioClassification (ru)":27.47,"MassiveScenarioClassification (sl)":41.04,"MassiveScenarioClassification (sq)":49.38,"MassiveScenarioClassification (sw)":40.62,"MassiveScenarioClassification (ta)":7.59,"MassiveScenarioClassification (te)":7.07,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":49.89,"MassiveScenarioClassification (tr)":43.08,"MassiveScenarioClassification (ur)":9.31,"MassiveScenarioClassification (vi)":27.46,"MassiveScenarioClassification (zh-TW)":7.24} +{"Rank":9,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.73,"AmazonCounterfactualClassification (de)":69.98,"AmazonCounterfactualClassification (ja)":46.05,"AmazonReviewsClassification (de)":37.9,"AmazonReviewsClassification (es)":37.33,"AmazonReviewsClassification (fr)":37.35,"AmazonReviewsClassification (ja)":22.29,"AmazonReviewsClassification (zh)":21.53,"MTOPDomainClassification (de)":76.98,"MTOPDomainClassification (es)":73.61,"MTOPDomainClassification (fr)":75.03,"MTOPDomainClassification (hi)":21.4,"MTOPDomainClassification (th)":16.21,"MTOPIntentClassification (de)":44.43,"MTOPIntentClassification (es)":42.03,"MTOPIntentClassification (fr)":43.85,"MTOPIntentClassification (hi)":3.8,"MTOPIntentClassification (th)":5.21,"MassiveIntentClassification (af)":34.32,"MassiveIntentClassification (am)":2.38,"MassiveIntentClassification (ar)":4.53,"MassiveIntentClassification (az)":31.76,"MassiveIntentClassification (bn)":2.58,"MassiveIntentClassification (cy)":28.94,"MassiveIntentClassification (de)":45.23,"MassiveIntentClassification (el)":10.05,"MassiveIntentClassification (es)":45.32,"MassiveIntentClassification (fa)":3.58,"MassiveIntentClassification (fi)":33.52,"MassiveIntentClassification (fr)":51.13,"MassiveIntentClassification (he)":2.63,"MassiveIntentClassification (hi)":2.68,"MassiveIntentClassification (hu)":32.31,"MassiveIntentClassification (hy)":3.33,"MassiveIntentClassification (id)":35.5,"MassiveIntentClassification (is)":29.82,"MassiveIntentClassification (it)":45.59,"MassiveIntentClassification (ja)":3.67,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":2.77,"MassiveIntentClassification (km)":5.66,"MassiveIntentClassification (kn)":2.59,"MassiveIntentClassification (ko)":2.34,"MassiveIntentClassification 
(lv)":33.97,"MassiveIntentClassification (ml)":2.55,"MassiveIntentClassification (mn)":14.7,"MassiveIntentClassification (ms)":33.12,"MassiveIntentClassification (my)":4.42,"MassiveIntentClassification (nl)":37.96,"MassiveIntentClassification (pt)":43.35,"MassiveIntentClassification (ro)":42.69,"MassiveIntentClassification (ru)":14.82,"MassiveIntentClassification (sl)":34.54,"MassiveIntentClassification (sq)":38.54,"MassiveIntentClassification (sw)":32.14,"MassiveIntentClassification (ta)":1.41,"MassiveIntentClassification (te)":2.5,"MassiveIntentClassification (th)":3.71,"MassiveIntentClassification (tl)":36.04,"MassiveIntentClassification (tr)":33.77,"MassiveIntentClassification (ur)":2.99,"MassiveIntentClassification (vi)":22.62,"MassiveIntentClassification (zh-TW)":4.63,"MassiveScenarioClassification (af)":44.45,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.41,"MassiveScenarioClassification (bn)":8.45,"MassiveScenarioClassification (cy)":35.04,"MassiveScenarioClassification (de)":59.12,"MassiveScenarioClassification (el)":17.68,"MassiveScenarioClassification (es)":55.61,"MassiveScenarioClassification (fa)":6.86,"MassiveScenarioClassification (fi)":41.34,"MassiveScenarioClassification (fr)":59.92,"MassiveScenarioClassification (he)":7.86,"MassiveScenarioClassification (hi)":7.63,"MassiveScenarioClassification (hu)":41.31,"MassiveScenarioClassification (hy)":9.23,"MassiveScenarioClassification (id)":44.64,"MassiveScenarioClassification (is)":39.63,"MassiveScenarioClassification (it)":54.58,"MassiveScenarioClassification (ja)":4.96,"MassiveScenarioClassification (jv)":40.73,"MassiveScenarioClassification (ka)":7.51,"MassiveScenarioClassification (km)":8.73,"MassiveScenarioClassification (kn)":7.99,"MassiveScenarioClassification (ko)":6.03,"MassiveScenarioClassification (lv)":36.42,"MassiveScenarioClassification (ml)":6.96,"MassiveScenarioClassification (mn)":19.85,"MassiveScenarioClassification (ms)":43.18,"MassiveScenarioClassification (my)":9.46,"MassiveScenarioClassification (nl)":50.0,"MassiveScenarioClassification (pt)":52.24,"MassiveScenarioClassification (ro)":53.7,"MassiveScenarioClassification (ru)":20.69,"MassiveScenarioClassification (sl)":39.79,"MassiveScenarioClassification (sq)":50.16,"MassiveScenarioClassification (sw)":40.48,"MassiveScenarioClassification (ta)":7.47,"MassiveScenarioClassification (te)":6.87,"MassiveScenarioClassification (th)":8.26,"MassiveScenarioClassification (tl)":48.94,"MassiveScenarioClassification (tr)":41.83,"MassiveScenarioClassification (ur)":9.77,"MassiveScenarioClassification (vi)":30.01,"MassiveScenarioClassification (zh-TW)":7.91} +{"Rank":10,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":24.49,"AmazonCounterfactualClassification (de)":54.46,"AmazonCounterfactualClassification (ja)":43.87,"AmazonReviewsClassification (de)":24.08,"AmazonReviewsClassification (es)":23.88,"AmazonReviewsClassification (fr)":23.31,"AmazonReviewsClassification (ja)":20.25,"AmazonReviewsClassification (zh)":20.49,"MTOPDomainClassification (de)":48.55,"MTOPDomainClassification (es)":58.39,"MTOPDomainClassification (fr)":54.61,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":14.98,"MTOPIntentClassification (de)":35.55,"MTOPIntentClassification (es)":36.72,"MTOPIntentClassification (fr)":34.71,"MTOPIntentClassification (hi)":4.44,"MTOPIntentClassification (th)":4.67,"MassiveIntentClassification 
(af)":33.68,"MassiveIntentClassification (am)":2.94,"MassiveIntentClassification (ar)":10.04,"MassiveIntentClassification (az)":30.74,"MassiveIntentClassification (bn)":3.02,"MassiveIntentClassification (cy)":33.94,"MassiveIntentClassification (de)":36.06,"MassiveIntentClassification (el)":27.7,"MassiveIntentClassification (es)":35.6,"MassiveIntentClassification (fa)":17.97,"MassiveIntentClassification (fi)":35.53,"MassiveIntentClassification (fr)":38.41,"MassiveIntentClassification (he)":2.69,"MassiveIntentClassification (hi)":3.43,"MassiveIntentClassification (hu)":34.05,"MassiveIntentClassification (hy)":3.11,"MassiveIntentClassification (id)":40.02,"MassiveIntentClassification (is)":32.63,"MassiveIntentClassification (it)":39.28,"MassiveIntentClassification (ja)":4.95,"MassiveIntentClassification (jv)":34.95,"MassiveIntentClassification (ka)":2.57,"MassiveIntentClassification (km)":4.73,"MassiveIntentClassification (kn)":3.54,"MassiveIntentClassification (ko)":2.68,"MassiveIntentClassification (lv)":37.91,"MassiveIntentClassification (ml)":2.88,"MassiveIntentClassification (mn)":16.94,"MassiveIntentClassification (ms)":36.6,"MassiveIntentClassification (my)":3.96,"MassiveIntentClassification (nl)":33.95,"MassiveIntentClassification (pt)":43.05,"MassiveIntentClassification (ro)":36.2,"MassiveIntentClassification (ru)":25.3,"MassiveIntentClassification (sl)":35.9,"MassiveIntentClassification (sq)":36.6,"MassiveIntentClassification (sw)":34.81,"MassiveIntentClassification (ta)":3.11,"MassiveIntentClassification (te)":2.53,"MassiveIntentClassification (th)":4.38,"MassiveIntentClassification (tl)":35.51,"MassiveIntentClassification (tr)":32.02,"MassiveIntentClassification (ur)":9.61,"MassiveIntentClassification (vi)":37.07,"MassiveIntentClassification (zh-TW)":4.79,"MassiveScenarioClassification (af)":36.17,"MassiveScenarioClassification (am)":7.64,"MassiveScenarioClassification (ar)":15.26,"MassiveScenarioClassification (az)":30.73,"MassiveScenarioClassification (bn)":7.15,"MassiveScenarioClassification (cy)":34.73,"MassiveScenarioClassification (de)":38.62,"MassiveScenarioClassification (el)":27.18,"MassiveScenarioClassification (es)":39.44,"MassiveScenarioClassification (fa)":21.43,"MassiveScenarioClassification (fi)":33.21,"MassiveScenarioClassification (fr)":40.26,"MassiveScenarioClassification (he)":7.42,"MassiveScenarioClassification (hi)":8.06,"MassiveScenarioClassification (hu)":34.54,"MassiveScenarioClassification (hy)":8.61,"MassiveScenarioClassification (id)":40.04,"MassiveScenarioClassification (is)":33.57,"MassiveScenarioClassification (it)":40.1,"MassiveScenarioClassification (ja)":9.96,"MassiveScenarioClassification (jv)":36.11,"MassiveScenarioClassification (ka)":7.13,"MassiveScenarioClassification (km)":9.66,"MassiveScenarioClassification (kn)":7.55,"MassiveScenarioClassification (ko)":7.27,"MassiveScenarioClassification (lv)":37.03,"MassiveScenarioClassification (ml)":7.22,"MassiveScenarioClassification (mn)":21.53,"MassiveScenarioClassification (ms)":37.57,"MassiveScenarioClassification (my)":9.54,"MassiveScenarioClassification (nl)":34.62,"MassiveScenarioClassification (pt)":44.68,"MassiveScenarioClassification (ro)":37.29,"MassiveScenarioClassification (ru)":28.16,"MassiveScenarioClassification (sl)":37.95,"MassiveScenarioClassification (sq)":37.82,"MassiveScenarioClassification (sw)":35.37,"MassiveScenarioClassification (ta)":7.19,"MassiveScenarioClassification (te)":7.29,"MassiveScenarioClassification (th)":9.47,"MassiveScenarioClassification 
(tl)":37.31,"MassiveScenarioClassification (tr)":34.57,"MassiveScenarioClassification (ur)":16.17,"MassiveScenarioClassification (vi)":35.91,"MassiveScenarioClassification (zh-TW)":10.19} +{"Rank":11,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.59,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.05,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":66.09,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.83,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.71,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification 
(ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":12,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.26,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":79.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":45.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":53.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification 
(az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":62.46,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":13,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.15,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.68,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":63.08,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.15,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":14,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.98,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.12,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification 
(fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":15,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.36,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification 
(zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.52,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":68.06,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.18,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification 
(is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":52.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification 
(vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.15,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification 
(ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification 
(bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification 
(ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.38,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification 
(pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":21,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":35.91,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":22,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":38.6,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":80.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":50.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.31,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":23,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.23,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":61.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification 
(it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":24,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":33.77,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification 
(hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification 
(zh-TW)":""} +{"Rank":25,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.38,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.65,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.87,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":26,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":27,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.63,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.86,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification 
(ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":28,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.79,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.12,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":59.41,"MassiveIntentClassification 
(he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":65.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":29,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.03,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification 
(fr)":77.1,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":43.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.59,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":61.28,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification 
(te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":30,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.97,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification 
(ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":31,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":24.9,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":25.55,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.49,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.98,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification 
(af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":11.41,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":32,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":23.52,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":27.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":8.61,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.24,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification 
(lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":10.98,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":33,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":22.45,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":24.27,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.79,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification 
(cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":16.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":22.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":34,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification 
(ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.61,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.84,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification 
(pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":35,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.02,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":64.49,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":39.4,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":38.01,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":43.63,"MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":36,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.71,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":74.8,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.97,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":46.39,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":53.86,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":37,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":40.94,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":84.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":55.51,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification 
(it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":38,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.91,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.41,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification 
(hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification 
(zh-TW)":""} +{"Rank":39,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":39.68,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":81.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":46.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":40,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.12,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":69.24,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":51.25,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":43.21,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":41,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.85,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":34.99,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.09,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification 
(ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":21.67,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":42,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.02,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification 
(hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":43,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification 
(hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification 
(th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":27.05,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":72.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.18,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":42.64,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification 
(jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":45,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":68.35,"AmazonCounterfactualClassification (ja)":63.45,"AmazonReviewsClassification (de)":35.91,"AmazonReviewsClassification (es)":37.49,"AmazonReviewsClassification (fr)":35.3,"AmazonReviewsClassification (ja)":33.24,"AmazonReviewsClassification (zh)":35.26,"MTOPDomainClassification (de)":79.2,"MTOPDomainClassification (es)":83.04,"MTOPDomainClassification (fr)":78.63,"MTOPDomainClassification (hi)":81.36,"MTOPDomainClassification (th)":79.99,"MTOPIntentClassification (de)":54.23,"MTOPIntentClassification (es)":60.28,"MTOPIntentClassification (fr)":54.05,"MTOPIntentClassification (hi)":59.9,"MTOPIntentClassification (th)":61.96,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":57.52,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification 
(af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":64.52,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":46,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":69.95,"AmazonCounterfactualClassification (ja)":69.79,"AmazonReviewsClassification (de)":39.52,"AmazonReviewsClassification (es)":39.99,"AmazonReviewsClassification (fr)":39.0,"AmazonReviewsClassification (ja)":36.64,"AmazonReviewsClassification (zh)":37.74,"MTOPDomainClassification (de)":85.73,"MTOPDomainClassification (es)":86.96,"MTOPDomainClassification (fr)":81.21,"MTOPDomainClassification (hi)":84.76,"MTOPDomainClassification (th)":82.51,"MTOPIntentClassification (de)":61.27,"MTOPIntentClassification (es)":66.59,"MTOPIntentClassification (fr)":59.76,"MTOPIntentClassification (hi)":62.37,"MTOPIntentClassification (th)":64.8,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.88,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification 
(ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.9,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":47,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":46.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.33,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification 
(bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.91,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":68.53,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":48,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification 
(ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.12,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification 
(pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":49,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.46,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":50,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.25,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":71.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":44.53,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.93,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":58.31,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":51,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":33.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.5,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.98,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.19,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification 
(it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.22,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification 
(fr)":63.64,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.8,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":73.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification 
(vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":53,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.75,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":43.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":19.38,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":13.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.21,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification 
(ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":54,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":36.77,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.37,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.82,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification 
(cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":55,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.76,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.38,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":64.45,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.42,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.11,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":56,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification 
(fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} diff --git a/all_data_tasks/35/default.jsonl b/all_data_tasks/35/default.jsonl index 993221a1dca3688768d0cfe4c8c98dce335768d6..0e6bc0c5f1e3bcc7b8685490bf76f82f8745df46 100644 --- a/all_data_tasks/35/default.jsonl +++ b/all_data_tasks/35/default.jsonl @@ -1,269 +1,101 @@ -{"index":147,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":73.17,"STS17 
(ar-ar)":81.87,"STS17 (en-ar)":77.93,"STS17 (en-de)":87.3,"STS17 (en-tr)":72.56,"STS17 (es-en)":88.24,"STS17 (es-es)":87.46,"STS17 (fr-en)":88.06,"STS17 (it-en)":89.68,"STS17 (ko-ko)":83.69,"STS17 (nl-en)":88.25,"STS22 (ar)":54.12,"STS22 (de)":49.12,"STS22 (de-en)":60.92,"STS22 (de-fr)":61.39,"STS22 (de-pl)":54.47,"STS22 (es)":67.0,"STS22 (es-en)":75.84,"STS22 (es-it)":75.04,"STS22 (fr)":69.82,"STS22 (fr-pl)":84.52,"STS22 (it)":75.87,"STS22 (pl)":39.21,"STS22 (pl-en)":73.18,"STS22 (ru)":60.83,"STS22 (tr)":68.72,"STS22 (zh-en)":71.88,"STSBenchmark":88.6} -{"index":151,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.66,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29} -{"index":103,"Rank":3,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.85,"STS17 (ar-ar)":79.38,"STS17 (en-ar)":58.76,"STS17 (en-de)":76.13,"STS17 (en-tr)":55.53,"STS17 (es-en)":72.26,"STS17 (es-es)":85.06,"STS17 (fr-en)":75.63,"STS17 (it-en)":71.36,"STS17 (ko-ko)":80.79,"STS17 (nl-en)":71.99,"STS22 (ar)":57.44,"STS22 (de)":60.12,"STS22 (de-en)":53.36,"STS22 (de-fr)":58.25,"STS22 (de-pl)":48.47,"STS22 (es)":68.57,"STS22 (es-en)":77.41,"STS22 (es-it)":74.69,"STS22 (fr)":81.47,"STS22 (fr-pl)":73.25,"STS22 (it)":79.28,"STS22 (pl)":42.08,"STS22 (pl-en)":77.5,"STS22 (ru)":61.71,"STS22 (tr)":66.62,"STS22 (zh-en)":69.87,"STSBenchmark":81.95} -{"index":150,"Rank":4,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.84,"STS17 (ar-ar)":74.52,"STS17 (en-ar)":71.27,"STS17 (en-de)":82.09,"STS17 (en-tr)":63.33,"STS17 (es-en)":76.5,"STS17 (es-es)":86.74,"STS17 (fr-en)":80.18,"STS17 (it-en)":80.15,"STS17 (ko-ko)":79.95,"STS17 (nl-en)":79.25,"STS22 (ar)":57.87,"STS22 (de)":55.95,"STS22 (de-en)":54.93,"STS22 (de-fr)":59.47,"STS22 (de-pl)":39.35,"STS22 (es)":66.58,"STS22 (es-en)":73.99,"STS22 (es-it)":66.46,"STS22 (fr)":74.8,"STS22 (fr-pl)":73.25,"STS22 (it)":77.76,"STS22 (pl)":34.07,"STS22 (pl-en)":70.37,"STS22 (ru)":60.66,"STS22 (tr)":63.7,"STS22 (zh-en)":69.92,"STSBenchmark":85.64} -{"index":153,"Rank":5,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":67.07,"STS17 (ar-ar)":73.03,"STS17 (en-ar)":57.41,"STS17 (en-de)":77.24,"STS17 (en-tr)":55.97,"STS17 (es-en)":72.44,"STS17 (es-es)":84.84,"STS17 (fr-en)":72.29,"STS17 (it-en)":77.33,"STS17 (ko-ko)":78.87,"STS17 (nl-en)":75.38,"STS22 (ar)":56.65,"STS22 (de)":53.45,"STS22 (de-en)":56.49,"STS22 (de-fr)":60.57,"STS22 (de-pl)":28.24,"STS22 (es)":66.88,"STS22 (es-en)":74.57,"STS22 (es-it)":71.81,"STS22 (fr)":76.58,"STS22 (fr-pl)":84.52,"STS22 (it)":76.53,"STS22 (pl)":35.8,"STS22 (pl-en)":72.69,"STS22 (ru)":59.9,"STS22 (tr)":63.71,"STS22 (zh-en)":63.74,"STSBenchmark":84.01} -{"index":217,"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":66.01,"STS17 
(ar-ar)":69.07,"STS17 (en-ar)":74.51,"STS17 (en-de)":73.85,"STS17 (en-tr)":72.07,"STS17 (es-en)":65.71,"STS17 (es-es)":80.83,"STS17 (fr-en)":76.98,"STS17 (it-en)":76.99,"STS17 (ko-ko)":71.32,"STS17 (nl-en)":75.22,"STS22 (ar)":57.67,"STS22 (de)":48.58,"STS22 (de-en)":50.14,"STS22 (de-fr)":53.28,"STS22 (de-pl)":58.69,"STS22 (es)":63.18,"STS22 (es-en)":71.86,"STS22 (es-it)":69.69,"STS22 (fr)":77.95,"STS22 (fr-pl)":61.98,"STS22 (it)":72.22,"STS22 (pl)":39.28,"STS22 (pl-en)":69.41,"STS22 (ru)":57.49,"STS22 (tr)":58.15,"STS22 (zh-en)":64.02,"STSBenchmark":72.25} -{"index":238,"Rank":7,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.66,"STS17 (ar-ar)":78.03,"STS17 (en-ar)":78.6,"STS17 (en-de)":81.48,"STS17 (en-tr)":76.34,"STS17 (es-en)":81.81,"STS17 (es-es)":87.91,"STS17 (fr-en)":78.06,"STS17 (it-en)":80.98,"STS17 (ko-ko)":68.24,"STS17 (nl-en)":81.0,"STS22 (ar)":54.51,"STS22 (de)":46.89,"STS22 (de-en)":45.0,"STS22 (de-fr)":49.43,"STS22 (de-pl)":39.32,"STS22 (es)":58.94,"STS22 (es-en)":67.71,"STS22 (es-it)":50.79,"STS22 (fr)":74.1,"STS22 (fr-pl)":73.25,"STS22 (it)":65.86,"STS22 (pl)":34.81,"STS22 (pl-en)":60.17,"STS22 (ru)":54.51,"STS22 (tr)":57.29,"STS22 (zh-en)":61.29,"STSBenchmark":86.45} -{"index":55,"Rank":8,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.52,"STS17 (ar-ar)":76.04,"STS17 (en-ar)":77.6,"STS17 (en-de)":70.4,"STS17 (en-tr)":71.04,"STS17 (es-en)":81.59,"STS17 (es-es)":82.77,"STS17 (fr-en)":77.16,"STS17 (it-en)":81.52,"STS17 (ko-ko)":77.0,"STS17 (nl-en)":80.7,"STS22 (ar)":52.61,"STS22 (de)":41.84,"STS22 (de-en)":49.09,"STS22 (de-fr)":50.6,"STS22 (de-pl)":50.44,"STS22 (es)":57.23,"STS22 (es-en)":67.29,"STS22 (es-it)":57.93,"STS22 (fr)":72.79,"STS22 (fr-pl)":73.25,"STS22 (it)":64.17,"STS22 (pl)":36.37,"STS22 (pl-en)":67.72,"STS22 (ru)":53.35,"STS22 (tr)":52.71,"STS22 (zh-en)":64.45,"STSBenchmark":81.34} -{"index":224,"Rank":9,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":65.43,"STS17 (ar-ar)":77.34,"STS17 (en-ar)":77.46,"STS17 (en-de)":80.24,"STS17 (en-tr)":74.34,"STS17 (es-en)":77.4,"STS17 (es-es)":83.71,"STS17 (fr-en)":79.28,"STS17 (it-en)":80.82,"STS17 (ko-ko)":76.4,"STS17 (nl-en)":80.51,"STS22 (ar)":49.04,"STS22 (de)":35.73,"STS22 (de-en)":47.51,"STS22 (de-fr)":60.76,"STS22 (de-pl)":36.09,"STS22 (es)":59.34,"STS22 (es-en)":68.96,"STS22 (es-it)":63.28,"STS22 (fr)":76.41,"STS22 (fr-pl)":61.98,"STS22 (it)":65.1,"STS22 (pl)":34.58,"STS22 (pl-en)":71.33,"STS22 (ru)":52.4,"STS22 (tr)":54.07,"STS22 (zh-en)":61.75,"STSBenchmark":80.75} -{"index":157,"Rank":10,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.1,"STS17 (ar-ar)":81.13,"STS17 (en-ar)":79.64,"STS17 (en-de)":52.52,"STS17 (en-tr)":4.75,"STS17 (es-en)":85.41,"STS17 (es-es)":87.33,"STS17 (fr-en)":83.96,"STS17 (it-en)":45.62,"STS17 (ko-ko)":61.89,"STS17 (nl-en)":46.69,"STS22 (ar)":55.0,"STS22 (de)":37.51,"STS22 (de-en)":51.66,"STS22 (de-fr)":39.66,"STS22 (de-pl)":26.11,"STS22 (es)":59.79,"STS22 (es-en)":73.59,"STS22 (es-it)":67.83,"STS22 (fr)":77.1,"STS22 (fr-pl)":84.52,"STS22 (it)":68.87,"STS22 (pl)":27.98,"STS22 (pl-en)":60.77,"STS22 (ru)":43.14,"STS22 (tr)":42.33,"STS22 (zh-en)":65.01,"STSBenchmark":85.79} -{"index":115,"Rank":11,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":58.34,"STS17 (ar-ar)":76.42,"STS17 (en-ar)":78.07,"STS17 (en-de)":59.1,"STS17 (en-tr)":11.8,"STS17 (es-en)":78.22,"STS17 (es-es)":86.0,"STS17 (fr-en)":80.46,"STS17 (it-en)":51.58,"STS17 (ko-ko)":66.89,"STS17 (nl-en)":45.85,"STS22 (ar)":58.67,"STS22 (de)":30.05,"STS22 (de-en)":51.16,"STS22 (de-fr)":53.28,"STS22 (de-pl)":43.05,"STS22 (es)":65.41,"STS22 (es-en)":75.06,"STS22 (es-it)":65.5,"STS22 (fr)":80.38,"STS22 (fr-pl)":28.17,"STS22 (it)":65.65,"STS22 (pl)":31.13,"STS22 (pl-en)":53.31,"STS22 (ru)":43.36,"STS22 (tr)":47.14,"STS22 (zh-en)":68.45,"STSBenchmark":80.9} -{"index":132,"Rank":12,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.22,"STS17 (ar-ar)":74.97,"STS17 (en-ar)":74.05,"STS17 (en-de)":74.95,"STS17 (en-tr)":77.18,"STS17 (es-en)":72.25,"STS17 (es-es)":80.65,"STS17 (fr-en)":77.2,"STS17 (it-en)":78.67,"STS17 (ko-ko)":66.14,"STS17 (nl-en)":78.14,"STS22 (ar)":40.25,"STS22 (de)":24.09,"STS22 (de-en)":34.28,"STS22 (de-fr)":41.29,"STS22 (de-pl)":25.81,"STS22 (es)":55.4,"STS22 (es-en)":57.82,"STS22 (es-it)":49.13,"STS22 (fr)":61.72,"STS22 (fr-pl)":61.98,"STS22 (it)":62.2,"STS22 (pl)":25.31,"STS22 (pl-en)":44.72,"STS22 (ru)":43.57,"STS22 (tr)":46.46,"STS22 (zh-en)":49.19,"STSBenchmark":67.39} -{"index":11,"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":52.31,"STS17 (ar-ar)":67.47,"STS17 (en-ar)":65.05,"STS17 (en-de)":66.66,"STS17 (en-tr)":70.05,"STS17 (es-en)":55.3,"STS17 (es-es)":79.67,"STS17 (fr-en)":70.82,"STS17 (it-en)":70.98,"STS17 (ko-ko)":70.52,"STS17 (nl-en)":68.12,"STS22 (ar)":42.57,"STS22 (de)":25.69,"STS22 (de-en)":32.35,"STS22 (de-fr)":37.41,"STS22 (de-pl)":15.67,"STS22 (es)":54.92,"STS22 (es-en)":54.34,"STS22 (es-it)":42.21,"STS22 (fr)":58.61,"STS22 (fr-pl)":39.44,"STS22 (it)":60.31,"STS22 (pl)":18.34,"STS22 (pl-en)":53.63,"STS22 (ru)":39.24,"STS22 (tr)":36.97,"STS22 (zh-en)":46.19,"STSBenchmark":69.77} -{"index":159,"Rank":14,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.04,"STS17 (ar-ar)":80.6,"STS17 (en-ar)":72.6,"STS17 (en-de)":40.34,"STS17 (en-tr)":6.79,"STS17 (es-en)":81.8,"STS17 (es-es)":85.65,"STS17 (fr-en)":79.94,"STS17 (it-en)":34.8,"STS17 (ko-ko)":57.28,"STS17 (nl-en)":33.58,"STS22 (ar)":54.82,"STS22 (de)":26.63,"STS22 (de-en)":49.55,"STS22 (de-fr)":22.36,"STS22 (de-pl)":35.32,"STS22 (es)":56.31,"STS22 (es-en)":71.03,"STS22 (es-it)":61.3,"STS22 (fr)":61.35,"STS22 (fr-pl)":73.25,"STS22 (it)":62.61,"STS22 (pl)":15.06,"STS22 (pl-en)":43.72,"STS22 (ru)":28.77,"STS22 (tr)":22.11,"STS22 (zh-en)":63.9,"STSBenchmark":83.63} -{"index":235,"Rank":15,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":50.06,"STS17 (ar-ar)":11.13,"STS17 (en-ar)":-3.93,"STS17 (en-de)":79.04,"STS17 (en-tr)":13.61,"STS17 (es-en)":71.72,"STS17 (es-es)":83.42,"STS17 (fr-en)":71.38,"STS17 (it-en)":69.5,"STS17 (ko-ko)":9.61,"STS17 (nl-en)":66.12,"STS22 (ar)":29.6,"STS22 (de)":47.72,"STS22 (de-en)":49.64,"STS22 (de-fr)":62.21,"STS22 (de-pl)":34.34,"STS22 (es)":58.16,"STS22 (es-en)":69.15,"STS22 (es-it)":65.26,"STS22 (fr)":77.49,"STS22 (fr-pl)":50.71,"STS22 (it)":66.91,"STS22 (pl)":27.04,"STS22 (pl-en)":58.85,"STS22 (ru)":26.63,"STS22 (tr)":43.36,"STS22 (zh-en)":29.0,"STSBenchmark":83.93} -{"index":227,"Rank":16,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":49.47,"STS17 (ar-ar)":9.06,"STS17 
(en-ar)":-3.22,"STS17 (en-de)":70.38,"STS17 (en-tr)":17.17,"STS17 (es-en)":60.24,"STS17 (es-es)":81.93,"STS17 (fr-en)":62.17,"STS17 (it-en)":59.11,"STS17 (ko-ko)":8.9,"STS17 (nl-en)":56.91,"STS22 (ar)":37.66,"STS22 (de)":50.58,"STS22 (de-en)":53.63,"STS22 (de-fr)":55.72,"STS22 (de-pl)":27.99,"STS22 (es)":59.14,"STS22 (es-en)":69.99,"STS22 (es-it)":60.94,"STS22 (fr)":79.43,"STS22 (fr-pl)":61.98,"STS22 (it)":67.14,"STS22 (pl)":33.74,"STS22 (pl-en)":60.18,"STS22 (ru)":32.69,"STS22 (tr)":55.79,"STS22 (zh-en)":28.85,"STSBenchmark":77.65} -{"index":49,"Rank":17,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.36,"STS17 (ar-ar)":46.8,"STS17 (en-ar)":-0.78,"STS17 (en-de)":47.5,"STS17 (en-tr)":4.18,"STS17 (es-en)":44.27,"STS17 (es-es)":79.22,"STS17 (fr-en)":47.15,"STS17 (it-en)":42.65,"STS17 (ko-ko)":39.79,"STS17 (nl-en)":36.6,"STS22 (ar)":25.06,"STS22 (de)":39.49,"STS22 (de-en)":54.22,"STS22 (de-fr)":48.91,"STS22 (de-pl)":33.04,"STS22 (es)":59.47,"STS22 (es-en)":66.65,"STS22 (es-it)":64.37,"STS22 (fr)":79.88,"STS22 (fr-pl)":39.44,"STS22 (it)":68.15,"STS22 (pl)":35.38,"STS22 (pl-en)":62.7,"STS22 (ru)":30.62,"STS22 (tr)":45.65,"STS22 (zh-en)":49.25,"STSBenchmark":88.96} -{"index":226,"Rank":18,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":46.79,"STS17 (ar-ar)":10.19,"STS17 (en-ar)":-5.77,"STS17 (en-de)":67.43,"STS17 (en-tr)":8.75,"STS17 (es-en)":54.96,"STS17 (es-es)":82.74,"STS17 (fr-en)":60.5,"STS17 (it-en)":46.26,"STS17 (ko-ko)":8.96,"STS17 (nl-en)":47.48,"STS22 (ar)":34.97,"STS22 (de)":51.7,"STS22 (de-en)":48.76,"STS22 (de-fr)":57.5,"STS22 (de-pl)":32.76,"STS22 (es)":57.49,"STS22 (es-en)":67.76,"STS22 (es-it)":57.18,"STS22 (fr)":78.7,"STS22 (fr-pl)":61.98,"STS22 (it)":67.67,"STS22 (pl)":30.68,"STS22 (pl-en)":54.17,"STS22 (ru)":15.36,"STS22 (tr)":58.12,"STS22 (zh-en)":29.42,"STSBenchmark":77.6} -{"index":234,"Rank":19,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":44.35,"STS17 (ar-ar)":10.75,"STS17 (en-ar)":-4.71,"STS17 (en-de)":73.62,"STS17 (en-tr)":-0.42,"STS17 (es-en)":62.62,"STS17 (es-es)":82.74,"STS17 (fr-en)":67.86,"STS17 (it-en)":51.86,"STS17 (ko-ko)":9.44,"STS17 (nl-en)":45.95,"STS22 (ar)":27.01,"STS22 (de)":43.73,"STS22 (de-en)":49.93,"STS22 (de-fr)":61.58,"STS22 (de-pl)":38.83,"STS22 (es)":57.68,"STS22 (es-en)":68.09,"STS22 (es-it)":61.58,"STS22 (fr)":75.01,"STS22 (fr-pl)":5.63,"STS22 (it)":62.01,"STS22 (pl)":25.0,"STS22 (pl-en)":51.72,"STS22 (ru)":14.21,"STS22 (tr)":47.3,"STS22 (zh-en)":23.1,"STSBenchmark":85.36} -{"index":233,"Rank":20,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.17,"STS17 (ar-ar)":13.36,"STS17 (en-ar)":-5.65,"STS17 (en-de)":67.11,"STS17 (en-tr)":-0.02,"STS17 (es-en)":47.72,"STS17 (es-es)":79.94,"STS17 (fr-en)":56.61,"STS17 (it-en)":30.46,"STS17 (ko-ko)":10.06,"STS17 (nl-en)":36.46,"STS22 (ar)":31.2,"STS22 (de)":42.08,"STS22 (de-en)":46.9,"STS22 (de-fr)":55.04,"STS22 (de-pl)":33.94,"STS22 (es)":53.81,"STS22 (es-en)":65.19,"STS22 (es-it)":55.29,"STS22 (fr)":77.69,"STS22 (fr-pl)":28.17,"STS22 (it)":60.65,"STS22 (pl)":24.42,"STS22 (pl-en)":42.97,"STS22 (ru)":12.13,"STS22 (tr)":40.45,"STS22 (zh-en)":20.15,"STSBenchmark":85.52} -{"index":219,"Rank":21,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":37.71,"STS17 (ar-ar)":50.89,"STS17 (en-ar)":-4.28,"STS17 
(en-de)":35.82,"STS17 (en-tr)":4.5,"STS17 (es-en)":16.31,"STS17 (es-es)":76.12,"STS17 (fr-en)":37.09,"STS17 (it-en)":24.45,"STS17 (ko-ko)":43.39,"STS17 (nl-en)":29.0,"STS22 (ar)":22.64,"STS22 (de)":31.04,"STS22 (de-en)":44.04,"STS22 (de-fr)":30.07,"STS22 (de-pl)":4.93,"STS22 (es)":54.78,"STS22 (es-en)":53.42,"STS22 (es-it)":44.27,"STS22 (fr)":77.0,"STS22 (fr-pl)":50.71,"STS22 (it)":60.4,"STS22 (pl)":26.77,"STS22 (pl-en)":32.8,"STS22 (ru)":14.72,"STS22 (tr)":33.69,"STS22 (zh-en)":41.64,"STSBenchmark":82.03} -{"index":72,"Rank":22,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.32,"STS17 (ar-ar)":55.62,"STS17 (en-ar)":8.21,"STS17 (en-de)":30.18,"STS17 (en-tr)":1.04,"STS17 (es-en)":28.78,"STS17 (es-es)":71.88,"STS17 (fr-en)":26.34,"STS17 (it-en)":20.73,"STS17 (ko-ko)":52.39,"STS17 (nl-en)":25.05,"STS22 (ar)":28.19,"STS22 (de)":21.99,"STS22 (de-en)":53.07,"STS22 (de-fr)":32.97,"STS22 (de-pl)":20.45,"STS22 (es)":49.81,"STS22 (es-en)":49.51,"STS22 (es-it)":45.78,"STS22 (fr)":67.66,"STS22 (fr-pl)":61.98,"STS22 (it)":48.25,"STS22 (pl)":23.31,"STS22 (pl-en)":36.8,"STS22 (ru)":9.07,"STS22 (tr)":34.66,"STS22 (zh-en)":28.68,"STSBenchmark":75.34} -{"index":218,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":34.57,"STS17 (ar-ar)":58.71,"STS17 (en-ar)":0.54,"STS17 (en-de)":27.54,"STS17 (en-tr)":0.43,"STS17 (es-en)":22.01,"STS17 (es-es)":78.37,"STS17 (fr-en)":30.7,"STS17 (it-en)":24.28,"STS17 (ko-ko)":43.37,"STS17 (nl-en)":24.51,"STS22 (ar)":17.54,"STS22 (de)":22.53,"STS22 (de-en)":42.86,"STS22 (de-fr)":43.52,"STS22 (de-pl)":1.63,"STS22 (es)":43.98,"STS22 (es-en)":53.99,"STS22 (es-it)":40.71,"STS22 (fr)":69.51,"STS22 (fr-pl)":16.9,"STS22 (it)":47.48,"STS22 (pl)":19.22,"STS22 (pl-en)":42.67,"STS22 (ru)":11.19,"STS22 (tr)":21.6,"STS22 (zh-en)":44.39,"STSBenchmark":83.09} -{"index":73,"Rank":24,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":27.72,"STS17 (ar-ar)":54.16,"STS17 (en-ar)":1.72,"STS17 (en-de)":25.48,"STS17 (en-tr)":2.09,"STS17 (es-en)":21.93,"STS17 (es-es)":67.8,"STS17 (fr-en)":18.91,"STS17 (it-en)":16.39,"STS17 (ko-ko)":45.66,"STS17 (nl-en)":23.49,"STS22 (ar)":5.17,"STS22 (de)":11.0,"STS22 (de-en)":53.93,"STS22 (de-fr)":25.11,"STS22 (de-pl)":20.94,"STS22 (es)":43.05,"STS22 (es-en)":32.74,"STS22 (es-it)":35.99,"STS22 (fr)":54.56,"STS22 (fr-pl)":5.63,"STS22 (it)":33.68,"STS22 (pl)":14.91,"STS22 (pl-en)":20.54,"STS22 (ru)":3.36,"STS22 (tr)":3.82,"STS22 (zh-en)":26.71,"STSBenchmark":79.54} -{"index":221,"Rank":25,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":25.1,"STS17 (ar-ar)":27.14,"STS17 (en-ar)":6.9,"STS17 (en-de)":11.59,"STS17 (en-tr)":6.46,"STS17 (es-en)":10.86,"STS17 (es-es)":55.45,"STS17 (fr-en)":16.02,"STS17 (it-en)":19.87,"STS17 (ko-ko)":8.08,"STS17 (nl-en)":24.92,"STS22 (ar)":19.57,"STS22 (de)":17.31,"STS22 (de-en)":26.03,"STS22 (de-fr)":10.26,"STS22 (de-pl)":16.94,"STS22 (es)":48.89,"STS22 (es-en)":51.79,"STS22 (es-it)":25.24,"STS22 (fr)":53.92,"STS22 (fr-pl)":39.44,"STS22 (it)":39.43,"STS22 (pl)":13.56,"STS22 (pl-en)":25.36,"STS22 (ru)":1.11,"STS22 (tr)":31.73,"STS22 (zh-en)":8.44,"STSBenchmark":61.26} -{"index":223,"Rank":26,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":24.28,"STS17 (ar-ar)":13.78,"STS17 (en-ar)":9.08,"STS17 
(en-de)":-3.11,"STS17 (en-tr)":-0.45,"STS17 (es-en)":-8.18,"STS17 (es-es)":48.23,"STS17 (fr-en)":5.81,"STS17 (it-en)":3.64,"STS17 (ko-ko)":2.54,"STS17 (nl-en)":0.44,"STS22 (ar)":32.42,"STS22 (de)":33.04,"STS22 (de-en)":28.65,"STS22 (de-fr)":14.77,"STS22 (de-pl)":11.21,"STS22 (es)":48.53,"STS22 (es-en)":26.97,"STS22 (es-it)":41.1,"STS22 (fr)":49.43,"STS22 (fr-pl)":39.44,"STS22 (it)":57.77,"STS22 (pl)":12.47,"STS22 (pl-en)":45.55,"STS22 (ru)":19.44,"STS22 (tr)":47.38,"STS22 (zh-en)":14.05,"STSBenchmark":61.55} -{"index":0,"Rank":27,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.33} -{"index":1,"Rank":28,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.99} -{"index":2,"Rank":29,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.85} -{"index":3,"Rank":30,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.74,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":4,"Rank":31,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 
(fr)":70.51,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":5,"Rank":32,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":79.99,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":6,"Rank":33,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.22} -{"index":7,"Rank":34,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.75,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":8,"Rank":35,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.86} -{"index":9,"Rank":36,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.56} -{"index":10,"Rank":37,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 
(fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":12,"Rank":38,"Model":"Arabic_text_embedding_for_sts<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":85.05,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":13,"Rank":39,"Model":"arabic_text_embedding_sts_arabertv02_arabicnlitriplet<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":84.96,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":14,"Rank":40,"Model":"llm2vec-croissant-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":15,"Rank":41,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":67.83,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":16,"Rank":42,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":87.35} -{"index":17,"Rank":43,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38} -{"index":18,"Rank":44,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85} -{"index":19,"Rank":45,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.04} -{"index":20,"Rank":46,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.07} -{"index":21,"Rank":47,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"index":22,"Rank":48,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 
(de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.42} -{"index":23,"Rank":49,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52} -{"index":24,"Rank":50,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.28,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":42.79,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.25} -{"index":25,"Rank":51,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.86} -{"index":26,"Rank":52,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"index":27,"Rank":53,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"index":28,"Rank":54,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 
(ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"index":29,"Rank":55,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"index":30,"Rank":56,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.58} -{"index":31,"Rank":57,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.55} -{"index":32,"Rank":58,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.46} -{"index":33,"Rank":59,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 
(fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.79} -{"index":34,"Rank":60,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":33.88,"STSBenchmark":""} -{"index":35,"Rank":61,"Model":"STS-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.98} -{"index":36,"Rank":62,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":37,"Rank":63,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.4,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":38,"Rank":64,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":38.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":39,"Rank":65,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 
(es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.1} -{"index":40,"Rank":66,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.43} -{"index":41,"Rank":67,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.57} -{"index":42,"Rank":68,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":43,"Rank":69,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":44,"Rank":70,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} 
-{"index":45,"Rank":71,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":46,"Rank":72,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":47,"Rank":73,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":48,"Rank":74,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.91} -{"index":50,"Rank":75,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.14,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":51,"Rank":76,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 
(de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":84.64,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":52,"Rank":77,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":53,"Rank":78,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.57,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":54,"Rank":79,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.81} -{"index":56,"Rank":80,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.72} -{"index":57,"Rank":81,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.32} -{"index":58,"Rank":82,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, 
fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.05} -{"index":59,"Rank":83,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.44} -{"index":60,"Rank":84,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.65} -{"index":61,"Rank":85,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.42} -{"index":62,"Rank":86,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.16} -{"index":63,"Rank":87,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 
(es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":73.36} -{"index":64,"Rank":88,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.34} -{"index":65,"Rank":89,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25} -{"index":66,"Rank":90,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21} -{"index":67,"Rank":91,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.72} -{"index":68,"Rank":92,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.79} -{"index":69,"Rank":93,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.93} -{"index":70,"Rank":94,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.61} -{"index":71,"Rank":95,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.59} -{"index":74,"Rank":96,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21} -{"index":75,"Rank":97,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.39} -{"index":76,"Rank":98,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":85.67} -{"index":77,"Rank":99,"Model":"Arabert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":83.16,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.29,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":78,"Rank":100,"Model":"Arabic-MiniLM-L12-v2-all-nli-triplet<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":81.11,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.41,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":79,"Rank":101,"Model":"Arabic-Triplet-Matryoshka-V2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":85.31,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":80,"Rank":102,"Model":"Arabic-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":82.4,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":51.38,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":81,"Rank":103,"Model":"Arabic-labse-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":82.47,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":57.26,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":82,"Rank":104,"Model":"Arabic-mpnet-base-all-nli-triplet<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":79.93,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.44,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":83,"Rank":105,"Model":"Marbert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":82.18,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.08,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":84,"Rank":106,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":85,"Rank":107,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":36.78,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":86,"Rank":108,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74} -{"index":87,"Rank":109,"Model":"bge_m3e_stella<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.83} 
-{"index":88,"Rank":110,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.6} -{"index":89,"Rank":111,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.0} -{"index":90,"Rank":112,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":52.67} -{"index":91,"Rank":113,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.68} -{"index":92,"Rank":114,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.1} -{"index":93,"Rank":115,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 
(de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.9} -{"index":94,"Rank":116,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.19} -{"index":95,"Rank":117,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.18} -{"index":96,"Rank":118,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.32} -{"index":97,"Rank":119,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.5} -{"index":98,"Rank":120,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.38} -{"index":99,"Rank":121,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 
(en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06} -{"index":100,"Rank":122,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":101,"Rank":123,"Model":"German_Semantic_STS_V2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":102,"Rank":124,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06} -{"index":104,"Rank":125,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.94} -{"index":105,"Rank":126,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 
(pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52} -{"index":106,"Rank":127,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.32} -{"index":107,"Rank":128,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.4} -{"index":108,"Rank":129,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.3} -{"index":109,"Rank":130,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.08} -{"index":110,"Rank":131,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.98} -{"index":111,"Rank":132,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"index":112,"Rank":133,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.68} -{"index":113,"Rank":134,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":47.29} -{"index":114,"Rank":135,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":73.13,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":116,"Rank":136,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.85} -{"index":117,"Rank":137,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.98} 
-{"index":118,"Rank":138,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85} -{"index":119,"Rank":139,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":71.4} -{"index":120,"Rank":140,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"index":121,"Rank":141,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.54,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":122,"Rank":142,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.73,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":123,"Rank":143,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 
(de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.2} -{"index":124,"Rank":144,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.1} -{"index":125,"Rank":145,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.98} -{"index":126,"Rank":146,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84} -{"index":127,"Rank":147,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.04} -{"index":128,"Rank":148,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.45} 
-{"index":129,"Rank":149,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.23} -{"index":130,"Rank":150,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74} -{"index":131,"Rank":151,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52} -{"index":133,"Rank":152,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":65.37,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":134,"Rank":153,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.15,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":135,"Rank":154,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 
(de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":48.52,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":136,"Rank":155,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":39.05,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":137,"Rank":156,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.47,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":138,"Rank":157,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.43} -{"index":139,"Rank":158,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88} -{"index":140,"Rank":159,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.56} -{"index":141,"Rank":160,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52} -{"index":142,"Rank":161,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.45} -{"index":143,"Rank":162,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.18} -{"index":144,"Rank":163,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52} -{"index":145,"Rank":164,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.21} -{"index":146,"Rank":165,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 
(pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74} -{"index":148,"Rank":166,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.36} -{"index":149,"Rank":167,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.95} -{"index":152,"Rank":168,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.38} -{"index":154,"Rank":169,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":155,"Rank":170,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":38.69,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":156,"Rank":171,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 
(ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06} -{"index":158,"Rank":172,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.71} -{"index":160,"Rank":173,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.02} -{"index":161,"Rank":174,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.33} -{"index":162,"Rank":175,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.57} -{"index":163,"Rank":176,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.6} -{"index":164,"Rank":177,"Model":"jina-embedding-s-en-v1<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.2} -{"index":165,"Rank":178,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":86.72,"STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":59.07,"STS22 (de-en)":55.97,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.14} -{"index":166,"Rank":179,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84} -{"index":167,"Rank":180,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":86.49,"STS17 (es-es)":88.25,"STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":67.97,"STS22 (es-en)":78.8,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.84} -{"index":168,"Rank":181,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.04} -{"index":169,"Rank":182,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 
(de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67} -{"index":170,"Rank":183,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"index":171,"Rank":184,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"index":172,"Rank":185,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"index":173,"Rank":186,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"index":174,"Rank":187,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.69} -{"index":175,"Rank":188,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.5} -{"index":176,"Rank":189,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.46} -{"index":177,"Rank":190,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.77} -{"index":178,"Rank":191,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.7,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":179,"Rank":192,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":75.66,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":180,"Rank":193,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 
(fr)":78.68,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":181,"Rank":194,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":182,"Rank":195,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.35,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":183,"Rank":196,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.2} -{"index":184,"Rank":197,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.1} -{"index":185,"Rank":198,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.77} -{"index":186,"Rank":199,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 
(en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.79} -{"index":187,"Rank":200,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.93} -{"index":188,"Rank":201,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.89} -{"index":189,"Rank":202,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88} -{"index":190,"Rank":203,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.31} -{"index":191,"Rank":204,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 
(tr)":"","STS22 (zh-en)":"","STSBenchmark":81.77} -{"index":192,"Rank":205,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38} -{"index":193,"Rank":206,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85} -{"index":194,"Rank":207,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":69.44,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":195,"Rank":208,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":37.93,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":196,"Rank":209,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":65.71,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":197,"Rank":210,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"index":198,"Rank":211,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42} -{"index":199,"Rank":212,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.96} -{"index":200,"Rank":213,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.28} -{"index":201,"Rank":214,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.8} -{"index":202,"Rank":215,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.14} 
-{"index":203,"Rank":216,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.46} -{"index":204,"Rank":217,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47} -{"index":205,"Rank":218,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.81} -{"index":206,"Rank":219,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.14} -{"index":207,"Rank":220,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25} -{"index":208,"Rank":221,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 
(de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.52} -{"index":209,"Rank":222,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.15} -{"index":210,"Rank":223,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":211,"Rank":224,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.4,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":212,"Rank":225,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.63,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":213,"Rank":226,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":214,"Rank":227,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 
(ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.32,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":215,"Rank":228,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":216,"Rank":229,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":37.34,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":220,"Rank":230,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42} -{"index":222,"Rank":231,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.54} -{"index":225,"Rank":232,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 
(it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.58} -{"index":228,"Rank":233,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.73} -{"index":229,"Rank":234,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.97} -{"index":230,"Rank":235,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.62,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":231,"Rank":236,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":79.16,"STS17 (en-ar)":81.22,"STS17 (en-de)":84.22,"STS17 (en-tr)":76.74,"STS17 (es-en)":84.44,"STS17 (es-es)":85.56,"STS17 (fr-en)":76.59,"STS17 (it-en)":82.35,"STS17 (ko-ko)":77.03,"STS17 (nl-en)":81.71,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.55,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.42} -{"index":232,"Rank":237,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":79.1,"STS17 (en-ar)":80.85,"STS17 (en-de)":83.28,"STS17 (en-tr)":74.9,"STS17 (es-en)":86.11,"STS17 (es-es)":85.14,"STS17 (fr-en)":81.17,"STS17 (it-en)":84.24,"STS17 (ko-ko)":83.41,"STS17 (nl-en)":82.51,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.3,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.64,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.82} -{"index":236,"Rank":238,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 
(ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":76.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.01} -{"index":237,"Rank":239,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":27.95,"STS22 (de)":8.16,"STS22 (de-en)":21.55,"STS22 (de-fr)":17.5,"STS22 (de-pl)":25.53,"STS22 (es)":45.31,"STS22 (es-en)":42.77,"STS22 (es-it)":32.83,"STS22 (fr)":42.0,"STS22 (fr-pl)":39.44,"STS22 (it)":39.69,"STS22 (pl)":9.71,"STS22 (pl-en)":42.08,"STS22 (ru)":60.06,"STS22 (tr)":15.46,"STS22 (zh-en)":31.25,"STSBenchmark":""} -{"index":239,"Rank":240,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":240,"Rank":241,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.73} -{"index":241,"Rank":242,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.07} -{"index":242,"Rank":243,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 
(pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.57} -{"index":243,"Rank":244,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":44.39} -{"index":244,"Rank":245,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":68.04} -{"index":245,"Rank":246,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.08} -{"index":246,"Rank":247,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.93} -{"index":247,"Rank":248,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.91,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.28} -{"index":248,"Rank":249,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 
(es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":71.11,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.24} -{"index":249,"Rank":250,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.77} -{"index":250,"Rank":251,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.69} -{"index":251,"Rank":252,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.8} -{"index":252,"Rank":253,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.72,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":253,"Rank":254,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.49,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} 
-{"index":254,"Rank":255,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"index":255,"Rank":256,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.92} -{"index":256,"Rank":257,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.4} -{"index":257,"Rank":258,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.45} -{"index":258,"Rank":259,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.13} -{"index":259,"Rank":260,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 
(es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":70.01,"STSBenchmark":85.99} -{"index":260,"Rank":261,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.78} -{"index":261,"Rank":262,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.32} -{"index":262,"Rank":263,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.02} -{"index":263,"Rank":264,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.08} -{"index":264,"Rank":265,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.09,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":64.5,"STS22 (zh-en)":"","STSBenchmark":83.17} -{"index":265,"Rank":266,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 
(ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.34} -{"index":266,"Rank":267,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.56} -{"index":267,"Rank":268,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.24} -{"index":268,"Rank":269,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.54} +{"Rank":1,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":67.9,"STS17 (ar-ar)":69.07,"STS17 (en-ar)":74.51,"STS17 (en-de)":73.85,"STS17 (en-tr)":72.07,"STS17 (es-en)":65.71,"STS17 (es-es)":80.83,"STS17 (fr-en)":76.98,"STS17 (it-en)":76.99,"STS17 (ko-ko)":71.32,"STS17 (nl-en)":75.22,"STS22 (ar)":57.67,"STS22 (de)":48.58,"STS22 (de-en)":50.14,"STS22 (de-fr)":53.28,"STS22 (de-pl)":58.69,"STS22 (es)":63.18,"STS22 (es-en)":71.86,"STS22 (es-it)":69.69,"STS22 (fr)":77.95,"STS22 (fr-pl)":61.98,"STS22 (it)":72.22,"STS22 (pl)":39.28,"STS22 (pl-en)":69.41,"STS22 (ru)":57.49,"STS22 (tr)":58.15,"STS22 (zh-en)":64.02,"STSBenchmark":72.25,"STSBenchmarkMultilingualSTS (cmn-Hans)":69.5,"STSBenchmarkMultilingualSTS (deu-Latn)":72.43,"STSBenchmarkMultilingualSTS (en)":72.25,"STSBenchmarkMultilingualSTS (fr)":75.1,"STSBenchmarkMultilingualSTS (fra-Latn)":75.1,"STSBenchmarkMultilingualSTS (ita-Latn)":72.97,"STSBenchmarkMultilingualSTS (nld-Latn)":70.22,"STSBenchmarkMultilingualSTS (pol-Latn)":72.58,"STSBenchmarkMultilingualSTS (por-Latn)":71.65,"STSBenchmarkMultilingualSTS (rus-Cyrl)":73.06,"STSBenchmarkMultilingualSTS 
(spa-Latn)":72.92} +{"Rank":2,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":44.27,"STS17 (ar-ar)":50.89,"STS17 (en-ar)":-4.28,"STS17 (en-de)":35.82,"STS17 (en-tr)":4.5,"STS17 (es-en)":16.31,"STS17 (es-es)":76.12,"STS17 (fr-en)":37.09,"STS17 (it-en)":24.45,"STS17 (ko-ko)":43.39,"STS17 (nl-en)":29.0,"STS22 (ar)":22.64,"STS22 (de)":31.04,"STS22 (de-en)":44.04,"STS22 (de-fr)":30.07,"STS22 (de-pl)":4.93,"STS22 (es)":54.78,"STS22 (es-en)":53.42,"STS22 (es-it)":44.27,"STS22 (fr)":77.0,"STS22 (fr-pl)":50.71,"STS22 (it)":60.4,"STS22 (pl)":26.77,"STS22 (pl-en)":32.8,"STS22 (ru)":14.72,"STS22 (tr)":33.69,"STS22 (zh-en)":41.64,"STSBenchmark":82.03,"STSBenchmarkMultilingualSTS (cmn-Hans)":39.74,"STSBenchmarkMultilingualSTS (deu-Latn)":62.4,"STSBenchmarkMultilingualSTS (en)":82.03,"STSBenchmarkMultilingualSTS (fr)":64.93,"STSBenchmarkMultilingualSTS (fra-Latn)":64.93,"STSBenchmarkMultilingualSTS (ita-Latn)":59.24,"STSBenchmarkMultilingualSTS (nld-Latn)":55.46,"STSBenchmarkMultilingualSTS (pol-Latn)":56.42,"STSBenchmarkMultilingualSTS (por-Latn)":61.56,"STSBenchmarkMultilingualSTS (rus-Cyrl)":55.55,"STSBenchmarkMultilingualSTS (spa-Latn)":61.62} +{"Rank":3,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.33,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.99,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":5,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.85,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":6,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.74,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.72,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":7,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.51,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":76.43,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":8,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 
(de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":79.99,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.02,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":9,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.22,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":10,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.75,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":83.02,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":11,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.86,"STSBenchmarkMultilingualSTS 
(cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":12,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.56,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":13,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":82.72,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":14,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","STS17 (ar-ar)":67.47,"STS17 (en-ar)":65.05,"STS17 (en-de)":66.66,"STS17 (en-tr)":70.05,"STS17 (es-en)":55.3,"STS17 (es-es)":79.67,"STS17 (fr-en)":70.82,"STS17 (it-en)":70.98,"STS17 (ko-ko)":70.52,"STS17 (nl-en)":68.12,"STS22 (ar)":42.57,"STS22 (de)":25.69,"STS22 (de-en)":32.35,"STS22 (de-fr)":37.41,"STS22 (de-pl)":15.67,"STS22 (es)":54.92,"STS22 (es-en)":54.34,"STS22 (es-it)":42.21,"STS22 (fr)":58.61,"STS22 (fr-pl)":39.44,"STS22 (it)":60.31,"STS22 (pl)":18.34,"STS22 (pl-en)":53.63,"STS22 (ru)":39.24,"STS22 (tr)":36.97,"STS22 (zh-en)":46.19,"STSBenchmark":69.77,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":69.82,"STSBenchmarkMultilingualSTS 
(fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":15,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.35,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":16,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":79.27,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":17,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":76.48,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS 
(spa-Latn)":""} +{"Rank":18,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":81.84,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":61.6,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":20,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":53.76,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":21,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 
(es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":66.71,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":22,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":23,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.4,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":24,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 
(fr)":38.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.1,"STSBenchmarkMultilingualSTS (cmn-Hans)":75.27,"STSBenchmarkMultilingualSTS (deu-Latn)":77.57,"STSBenchmarkMultilingualSTS (en)":83.12,"STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":77.48,"STSBenchmarkMultilingualSTS (ita-Latn)":76.24,"STSBenchmarkMultilingualSTS (nld-Latn)":74.83,"STSBenchmarkMultilingualSTS (pol-Latn)":74.67,"STSBenchmarkMultilingualSTS (por-Latn)":76.61,"STSBenchmarkMultilingualSTS (rus-Cyrl)":76.19,"STSBenchmarkMultilingualSTS (spa-Latn)":79.51} +{"Rank":26,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.72,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":27,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.32,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS 
(deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":28,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.05,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":29,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.44,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":30,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.65,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS 
(nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":31,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.42,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":32,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.16,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":33,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":73.36,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":34,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.23,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":35,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":36,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":37,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":47.29,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":38,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":73.02,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":39,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":40,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 
(tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":41,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.54,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":81.64,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":42,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.73,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":85.79,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":43,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS 
(fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":81.81,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":44,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":82.96,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":45,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":58.45,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":46,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":65.37,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":37.14,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":47,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.15,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":33.41,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":48,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":48.52,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":15.66,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":49,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":39.05,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":50,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 
(es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.47,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":54.97,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":51,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":69.82,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":61.87,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":84.25,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":52,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":34.07,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.64,"STSBenchmarkMultilingualSTS (cmn-Hans)":79.87,"STSBenchmarkMultilingualSTS (deu-Latn)":79.68,"STSBenchmarkMultilingualSTS (en)":85.64,"STSBenchmarkMultilingualSTS (fr)":80.62,"STSBenchmarkMultilingualSTS (fra-Latn)":80.85,"STSBenchmarkMultilingualSTS (ita-Latn)":78.09,"STSBenchmarkMultilingualSTS (nld-Latn)":75.96,"STSBenchmarkMultilingualSTS (pol-Latn)":74.93,"STSBenchmarkMultilingualSTS (por-Latn)":67.16,"STSBenchmarkMultilingualSTS (rus-Cyrl)":79.33,"STSBenchmarkMultilingualSTS (spa-Latn)":81.75} +{"Rank":53,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 
(es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":34.66,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.29,"STSBenchmarkMultilingualSTS (cmn-Hans)":81.22,"STSBenchmarkMultilingualSTS (deu-Latn)":84.27,"STSBenchmarkMultilingualSTS (en)":87.29,"STSBenchmarkMultilingualSTS (fr)":82.53,"STSBenchmarkMultilingualSTS (fra-Latn)":83.28,"STSBenchmarkMultilingualSTS (ita-Latn)":81.75,"STSBenchmarkMultilingualSTS (nld-Latn)":81.63,"STSBenchmarkMultilingualSTS (pol-Latn)":81.06,"STSBenchmarkMultilingualSTS (por-Latn)":73.31,"STSBenchmarkMultilingualSTS (rus-Cyrl)":83.05,"STSBenchmarkMultilingualSTS (spa-Latn)":83.81} +{"Rank":54,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":35.8,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.11,"STSBenchmarkMultilingualSTS (cmn-Hans)":78.49,"STSBenchmarkMultilingualSTS (deu-Latn)":79.17,"STSBenchmarkMultilingualSTS (en)":84.11,"STSBenchmarkMultilingualSTS (fr)":79.32,"STSBenchmarkMultilingualSTS (fra-Latn)":79.2,"STSBenchmarkMultilingualSTS (ita-Latn)":78.21,"STSBenchmarkMultilingualSTS (nld-Latn)":76.04,"STSBenchmarkMultilingualSTS (pol-Latn)":72.61,"STSBenchmarkMultilingualSTS (por-Latn)":77.39,"STSBenchmarkMultilingualSTS (rus-Cyrl)":78.24,"STSBenchmarkMultilingualSTS (spa-Latn)":80.31} +{"Rank":55,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":56,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":38.69,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":57,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":49.97,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":58,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":61.35,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":36.78,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":59,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.28,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS 
(ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":60,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.8,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":61,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.14,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":62,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.46,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":63,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.81,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":64,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":65,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.52,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":66,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 
(es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":67,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":37.34,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":68,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":58.71,"STS17 (en-ar)":0.54,"STS17 (en-de)":27.54,"STS17 (en-tr)":0.43,"STS17 (es-en)":22.01,"STS17 (es-es)":78.37,"STS17 (fr-en)":30.7,"STS17 (it-en)":24.28,"STS17 (ko-ko)":43.37,"STS17 (nl-en)":24.51,"STS22 (ar)":17.54,"STS22 (de)":22.53,"STS22 (de-en)":42.86,"STS22 (de-fr)":43.52,"STS22 (de-pl)":1.63,"STS22 (es)":43.98,"STS22 (es-en)":53.99,"STS22 (es-it)":40.71,"STS22 (fr)":69.51,"STS22 (fr-pl)":16.9,"STS22 (it)":47.48,"STS22 (pl)":19.22,"STS22 (pl-en)":42.67,"STS22 (ru)":11.19,"STS22 (tr)":21.6,"STS22 (zh-en)":44.39,"STSBenchmark":83.09,"STSBenchmarkMultilingualSTS (cmn-Hans)":38.93,"STSBenchmarkMultilingualSTS (deu-Latn)":63.28,"STSBenchmarkMultilingualSTS (en)":83.09,"STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":66.68,"STSBenchmarkMultilingualSTS (ita-Latn)":60.71,"STSBenchmarkMultilingualSTS (nld-Latn)":60.03,"STSBenchmarkMultilingualSTS (pol-Latn)":60.2,"STSBenchmarkMultilingualSTS (por-Latn)":63.85,"STSBenchmarkMultilingualSTS (rus-Cyrl)":56.09,"STSBenchmarkMultilingualSTS (spa-Latn)":65.33} +{"Rank":69,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 
(de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42,"STSBenchmarkMultilingualSTS (cmn-Hans)":39.43,"STSBenchmarkMultilingualSTS (deu-Latn)":61.43,"STSBenchmarkMultilingualSTS (en)":83.42,"STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":65.15,"STSBenchmarkMultilingualSTS (ita-Latn)":62.72,"STSBenchmarkMultilingualSTS (nld-Latn)":57.01,"STSBenchmarkMultilingualSTS (pol-Latn)":52.36,"STSBenchmarkMultilingualSTS (por-Latn)":62.12,"STSBenchmarkMultilingualSTS (rus-Cyrl)":55.54,"STSBenchmarkMultilingualSTS (spa-Latn)":65.78} +{"Rank":70,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":27.14,"STS17 (en-ar)":6.9,"STS17 (en-de)":11.59,"STS17 (en-tr)":6.46,"STS17 (es-en)":10.86,"STS17 (es-es)":55.45,"STS17 (fr-en)":16.02,"STS17 (it-en)":19.87,"STS17 (ko-ko)":8.08,"STS17 (nl-en)":24.92,"STS22 (ar)":19.57,"STS22 (de)":17.31,"STS22 (de-en)":26.03,"STS22 (de-fr)":10.26,"STS22 (de-pl)":16.94,"STS22 (es)":48.89,"STS22 (es-en)":51.79,"STS22 (es-it)":25.24,"STS22 (fr)":53.92,"STS22 (fr-pl)":39.44,"STS22 (it)":39.43,"STS22 (pl)":13.56,"STS22 (pl-en)":25.36,"STS22 (ru)":1.11,"STS22 (tr)":31.73,"STS22 (zh-en)":8.44,"STSBenchmark":61.26,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":71,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.54,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":72,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":13.78,"STS17 (en-ar)":9.08,"STS17 (en-de)":-3.11,"STS17 (en-tr)":-0.45,"STS17 (es-en)":-8.18,"STS17 (es-es)":48.23,"STS17 (fr-en)":5.81,"STS17 (it-en)":3.64,"STS17 (ko-ko)":2.54,"STS17 (nl-en)":0.44,"STS22 (ar)":32.42,"STS22 (de)":33.04,"STS22 (de-en)":28.65,"STS22 (de-fr)":14.77,"STS22 (de-pl)":11.21,"STS22 (es)":48.53,"STS22 (es-en)":26.97,"STS22 (es-it)":41.1,"STS22 (fr)":49.43,"STS22 (fr-pl)":39.44,"STS22 (it)":57.77,"STS22 
(pl)":12.47,"STS22 (pl-en)":45.55,"STS22 (ru)":19.44,"STS22 (tr)":47.38,"STS22 (zh-en)":14.05,"STSBenchmark":61.55,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":73,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":77.34,"STS17 (en-ar)":77.46,"STS17 (en-de)":80.24,"STS17 (en-tr)":74.34,"STS17 (es-en)":77.4,"STS17 (es-es)":83.71,"STS17 (fr-en)":79.28,"STS17 (it-en)":80.82,"STS17 (ko-ko)":76.4,"STS17 (nl-en)":80.51,"STS22 (ar)":49.04,"STS22 (de)":35.73,"STS22 (de-en)":47.51,"STS22 (de-fr)":60.76,"STS22 (de-pl)":36.09,"STS22 (es)":59.34,"STS22 (es-en)":68.96,"STS22 (es-it)":63.28,"STS22 (fr)":76.41,"STS22 (fr-pl)":61.98,"STS22 (it)":65.1,"STS22 (pl)":34.58,"STS22 (pl-en)":71.33,"STS22 (ru)":52.4,"STS22 (tr)":54.07,"STS22 (zh-en)":61.75,"STSBenchmark":80.75,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":77.49,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":74,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.58,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":75,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":10.19,"STS17 (en-ar)":-5.77,"STS17 (en-de)":67.43,"STS17 (en-tr)":8.75,"STS17 (es-en)":54.96,"STS17 (es-es)":82.74,"STS17 (fr-en)":60.5,"STS17 (it-en)":46.26,"STS17 (ko-ko)":8.96,"STS17 (nl-en)":47.48,"STS22 (ar)":34.97,"STS22 (de)":51.7,"STS22 (de-en)":48.76,"STS22 (de-fr)":57.5,"STS22 (de-pl)":32.76,"STS22 (es)":57.49,"STS22 (es-en)":67.76,"STS22 (es-it)":57.18,"STS22 (fr)":78.7,"STS22 (fr-pl)":61.98,"STS22 (it)":67.67,"STS22 (pl)":30.68,"STS22 (pl-en)":54.17,"STS22 (ru)":15.36,"STS22 (tr)":58.12,"STS22 
(zh-en)":29.42,"STSBenchmark":77.6,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":76,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":9.06,"STS17 (en-ar)":-3.22,"STS17 (en-de)":70.38,"STS17 (en-tr)":17.17,"STS17 (es-en)":60.24,"STS17 (es-es)":81.93,"STS17 (fr-en)":62.17,"STS17 (it-en)":59.11,"STS17 (ko-ko)":8.9,"STS17 (nl-en)":56.91,"STS22 (ar)":37.66,"STS22 (de)":50.58,"STS22 (de-en)":53.63,"STS22 (de-fr)":55.72,"STS22 (de-pl)":27.99,"STS22 (es)":59.14,"STS22 (es-en)":69.99,"STS22 (es-it)":60.94,"STS22 (fr)":79.43,"STS22 (fr-pl)":61.98,"STS22 (it)":67.14,"STS22 (pl)":33.74,"STS22 (pl-en)":60.18,"STS22 (ru)":32.69,"STS22 (tr)":55.79,"STS22 (zh-en)":28.85,"STSBenchmark":77.65,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":77,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.73,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":78,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.97,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS 
(fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":79,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.62,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":63.85,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":80,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":79.16,"STS17 (en-ar)":81.22,"STS17 (en-de)":84.22,"STS17 (en-tr)":76.74,"STS17 (es-en)":84.44,"STS17 (es-es)":85.56,"STS17 (fr-en)":76.59,"STS17 (it-en)":82.35,"STS17 (ko-ko)":77.03,"STS17 (nl-en)":81.71,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.55,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.42,"STSBenchmarkMultilingualSTS (cmn-Hans)":80.47,"STSBenchmarkMultilingualSTS (deu-Latn)":78.87,"STSBenchmarkMultilingualSTS (en)":84.42,"STSBenchmarkMultilingualSTS (fr)":79.9,"STSBenchmarkMultilingualSTS (fra-Latn)":79.9,"STSBenchmarkMultilingualSTS (ita-Latn)":80.39,"STSBenchmarkMultilingualSTS (nld-Latn)":79.54,"STSBenchmarkMultilingualSTS (pol-Latn)":78.29,"STSBenchmarkMultilingualSTS (por-Latn)":80.16,"STSBenchmarkMultilingualSTS (rus-Cyrl)":79.32,"STSBenchmarkMultilingualSTS (spa-Latn)":81.1} +{"Rank":81,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":79.1,"STS17 (en-ar)":80.85,"STS17 (en-de)":83.28,"STS17 (en-tr)":74.9,"STS17 (es-en)":86.11,"STS17 (es-es)":85.14,"STS17 (fr-en)":81.17,"STS17 (it-en)":84.24,"STS17 (ko-ko)":83.41,"STS17 (nl-en)":82.51,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.3,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.64,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.82,"STSBenchmarkMultilingualSTS (cmn-Hans)":81.98,"STSBenchmarkMultilingualSTS (deu-Latn)":83.56,"STSBenchmarkMultilingualSTS (en)":86.82,"STSBenchmarkMultilingualSTS (fr)":84.69,"STSBenchmarkMultilingualSTS (fra-Latn)":84.69,"STSBenchmarkMultilingualSTS 
(ita-Latn)":84.09,"STSBenchmarkMultilingualSTS (nld-Latn)":83.36,"STSBenchmarkMultilingualSTS (pol-Latn)":81.46,"STSBenchmarkMultilingualSTS (por-Latn)":84.0,"STSBenchmarkMultilingualSTS (rus-Cyrl)":82.45,"STSBenchmarkMultilingualSTS (spa-Latn)":84.61} +{"Rank":82,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":13.36,"STS17 (en-ar)":-5.65,"STS17 (en-de)":67.11,"STS17 (en-tr)":-0.02,"STS17 (es-en)":47.72,"STS17 (es-es)":79.94,"STS17 (fr-en)":56.61,"STS17 (it-en)":30.46,"STS17 (ko-ko)":10.06,"STS17 (nl-en)":36.46,"STS22 (ar)":31.2,"STS22 (de)":42.08,"STS22 (de-en)":46.9,"STS22 (de-fr)":55.04,"STS22 (de-pl)":33.94,"STS22 (es)":53.81,"STS22 (es-en)":65.19,"STS22 (es-it)":55.29,"STS22 (fr)":77.69,"STS22 (fr-pl)":28.17,"STS22 (it)":60.65,"STS22 (pl)":24.42,"STS22 (pl-en)":42.97,"STS22 (ru)":12.13,"STS22 (tr)":40.45,"STS22 (zh-en)":20.15,"STSBenchmark":85.52,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":74.04,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":83,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":10.75,"STS17 (en-ar)":-4.71,"STS17 (en-de)":73.62,"STS17 (en-tr)":-0.42,"STS17 (es-en)":62.62,"STS17 (es-es)":82.74,"STS17 (fr-en)":67.86,"STS17 (it-en)":51.86,"STS17 (ko-ko)":9.44,"STS17 (nl-en)":45.95,"STS22 (ar)":27.01,"STS22 (de)":43.73,"STS22 (de-en)":49.93,"STS22 (de-fr)":61.58,"STS22 (de-pl)":38.83,"STS22 (es)":57.68,"STS22 (es-en)":68.09,"STS22 (es-it)":61.58,"STS22 (fr)":75.01,"STS22 (fr-pl)":5.63,"STS22 (it)":62.01,"STS22 (pl)":25.0,"STS22 (pl-en)":51.72,"STS22 (ru)":14.21,"STS22 (tr)":47.3,"STS22 (zh-en)":23.1,"STSBenchmark":85.36,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":77.59,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":84,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":11.13,"STS17 (en-ar)":-3.93,"STS17 (en-de)":79.04,"STS17 (en-tr)":13.61,"STS17 (es-en)":71.72,"STS17 (es-es)":83.42,"STS17 (fr-en)":71.38,"STS17 (it-en)":69.5,"STS17 (ko-ko)":9.61,"STS17 (nl-en)":66.12,"STS22 (ar)":29.6,"STS22 (de)":47.72,"STS22 (de-en)":49.64,"STS22 (de-fr)":62.21,"STS22 (de-pl)":34.34,"STS22 (es)":58.16,"STS22 (es-en)":69.15,"STS22 (es-it)":65.26,"STS22 (fr)":77.49,"STS22 (fr-pl)":50.71,"STS22 (it)":66.91,"STS22 (pl)":27.04,"STS22 (pl-en)":58.85,"STS22 (ru)":26.63,"STS22 (tr)":43.36,"STS22 (zh-en)":29.0,"STSBenchmark":83.93,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.42,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS 
(ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":85,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":76.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.01,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":81.24,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":86,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":81.81,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":87,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":78.12,"STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":88,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":83.48,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":89,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.91,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":75.48,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":90,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":71.11,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":78.16,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":91,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 
(es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.72,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":46.23,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":92,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.49,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":42.32,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":93,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.78,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":94,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 
(fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.32,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":95,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.02,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":96,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.08,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":97,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.09,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":64.5,"STS22 (zh-en)":"","STSBenchmark":83.17,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS 
(deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":77.55,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":98,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.34,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":99,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.56,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":100,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.24,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS 
(pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":101,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.54,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} diff --git a/all_data_tasks/36/default.jsonl b/all_data_tasks/36/default.jsonl index fc1ab7fecf6313928e1c3f8403e089cdb5b2998d..f8e7b0c80b54ecdeb6458ec3951d80e63e33b160 100644 --- a/all_data_tasks/36/default.jsonl +++ b/all_data_tasks/36/default.jsonl @@ -1,30 +1,36 @@ -{"index":13,"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74} -{"index":26,"Rank":2,"Model":"text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92} -{"index":12,"Rank":3,"Model":"GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12} -{"index":27,"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11} -{"index":16,"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, 
fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21} -{"index":17,"Rank":6,"Model":"e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18} -{"index":10,"Rank":7,"Model":"Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02} -{"index":29,"Rank":8,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53} -{"index":11,"Rank":9,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01} -{"index":25,"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65} -{"index":28,"Rank":11,"Model":"text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49} -{"index":7,"Rank":12,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72} -{"index":6,"Rank":13,"Model":"bge-m3-instruct<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, 
fp32)":8.46,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33} -{"index":20,"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33} -{"index":24,"Rank":15,"Model":"text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36} -{"index":1,"Rank":16,"Model":"dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18} -{"index":22,"Rank":17,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8} -{"index":5,"Rank":18,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18} -{"index":18,"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2} -{"index":0,"Rank":20,"Model":"dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84} -{"index":19,"Rank":21,"Model":"all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73} -{"index":15,"Rank":22,"Model":"contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":15.86,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":14.42,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15} -{"index":3,"Rank":23,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76} -{"index":4,"Rank":24,"Model":"bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31} -{"index":9,"Rank":25,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28} -{"index":21,"Rank":26,"Model":"all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66} -{"index":2,"Rank":27,"Model":"bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27} -{"index":8,"Rank":28,"Model":"bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35} -{"index":14,"Rank":29,"Model":"contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, 
fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3} -{"index":23,"Rank":30,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ARCChallenge":"","AlphaNLI":"","HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""} +{"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74} +{"Rank":2,"Model":"text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92} +{"Rank":3,"Model":"GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12} +{"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11} +{"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21} +{"Rank":6,"Model":"e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18} +{"Rank":7,"Model":"Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02} +{"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":24.69,"ARCChallenge":10.83,"AlphaNLI":13.59,"HellaSwag":27.35,"PIQA":28.82,"Quail":4.85,"RARbCode":58.92,"RARbMath":67.32,"SIQA":5.36,"SpartQA":5.64,"TempReasonL1":1.14,"TempReasonL2Fact":42.97,"TempReasonL2Pure":2.05,"TempReasonL3Fact":38.22,"TempReasonL3Pure":8.31,"WinoGrande":54.99} +{"Rank":9,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53} +{"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01} +{"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":23.26,"ARCChallenge":9.61,"AlphaNLI":16.44,"HellaSwag":24.79,"PIQA":25.09,"Quail":3.52,"RARbCode":52.16,"RARbMath":65.35,"SIQA":3.72,"SpartQA":7.91,"TempReasonL1":0.72,"TempReasonL2Fact":38.76,"TempReasonL2Pure":1.63,"TempReasonL3Fact":35.85,"TempReasonL3Pure":7.11,"WinoGrande":56.18} +{"Rank":12,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65} +{"Rank":13,"Model":"text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49} +{"Rank":14,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72} +{"Rank":15,"Model":"bge-m3-instruct<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, 
fp32)":8.46,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33} +{"Rank":16,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.04,"ARCChallenge":7.14,"AlphaNLI":13.0,"HellaSwag":23.73,"PIQA":21.08,"Quail":2.38,"RARbCode":46.96,"RARbMath":63.91,"SIQA":2.57,"SpartQA":5.43,"TempReasonL1":0.8,"TempReasonL2Fact":36.76,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.42,"TempReasonL3Pure":6.36,"WinoGrande":37.46} +{"Rank":17,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33} +{"Rank":18,"Model":"text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36} +{"Rank":19,"Model":"dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18} +{"Rank":20,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8} +{"Rank":21,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18} +{"Rank":22,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2} +{"Rank":23,"Model":"dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, 
fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84} +{"Rank":24,"Model":"contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.09,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":17.73,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15} +{"Rank":25,"Model":"all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73} +{"Rank":26,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76} +{"Rank":27,"Model":"bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31} +{"Rank":28,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28} +{"Rank":29,"Model":"all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66} +{"Rank":30,"Model":"bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27} +{"Rank":31,"Model":"bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, 
fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35} +{"Rank":32,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":12.24,"ARCChallenge":7.19,"AlphaNLI":21.87,"HellaSwag":17.53,"PIQA":18.65,"Quail":2.98,"RARbCode":11.02,"RARbMath":30.93,"SIQA":1.21,"SpartQA":5.69,"TempReasonL1":1.94,"TempReasonL2Fact":5.34,"TempReasonL2Pure":0.33,"TempReasonL3Fact":6.79,"TempReasonL3Pure":3.19,"WinoGrande":49.01} +{"Rank":33,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":11.55,"ARCChallenge":6.19,"AlphaNLI":20.89,"HellaSwag":16.98,"PIQA":15.79,"Quail":2.96,"RARbCode":8.48,"RARbMath":30.02,"SIQA":0.88,"SpartQA":4.94,"TempReasonL1":1.43,"TempReasonL2Fact":6.21,"TempReasonL2Pure":0.22,"TempReasonL3Fact":6.77,"TempReasonL3Pure":4.9,"WinoGrande":46.52} +{"Rank":34,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":9.31,"ARCChallenge":3.78,"AlphaNLI":13.11,"HellaSwag":5.59,"PIQA":6.53,"Quail":1.91,"RARbCode":2.31,"RARbMath":27.19,"SIQA":1.07,"SpartQA":1.56,"TempReasonL1":1.56,"TempReasonL2Fact":7.06,"TempReasonL2Pure":0.14,"TempReasonL3Fact":8.74,"TempReasonL3Pure":4.73,"WinoGrande":54.3} +{"Rank":35,"Model":"contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3} +{"Rank":36,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","ARCChallenge":3.85,"AlphaNLI":14.15,"HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""} diff --git a/all_data_tasks/37/default.jsonl b/all_data_tasks/37/default.jsonl index dd8ff771c0e30041e69501eef76afd269ec19f9d..651c586d5c83a7d7ab48a093a03c03f6d7da87b2 100644 --- a/all_data_tasks/37/default.jsonl +++ b/all_data_tasks/37/default.jsonl @@ -1,14 +1,14 @@ -{"index":4,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15} -{"index":3,"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval 
(pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66} -{"index":7,"Rank":3,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41} -{"index":0,"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77} -{"index":10,"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5} -{"index":8,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05} -{"index":1,"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13} -{"index":13,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval 
(leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25} -{"index":11,"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78} -{"index":6,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04} -{"index":12,"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38} -{"index":2,"Rank":12,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25} -{"index":9,"Rank":13,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29} -{"index":5,"Rank":14,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval 
(leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51} +{"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15} +{"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66} +{"Rank":3,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41} +{"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77} +{"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5} +{"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval 
(leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05} +{"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13} +{"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25} +{"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78} +{"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04} +{"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38} +{"Rank":12,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval 
(pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25} +{"Rank":13,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29} +{"Rank":14,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51} diff --git a/all_data_tasks/38/default.jsonl b/all_data_tasks/38/default.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..7524e444fbdcb67ff3fe4e71d3324975575bb76a --- /dev/null +++ b/all_data_tasks/38/default.jsonl @@ -0,0 +1,25 @@ +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13} +{"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91} +{"Rank":4,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":58.03,"GeoreviewClassification (rus-Cyrl)":45.72,"HeadlineClassification (rus-Cyrl)":78.05,"InappropriatenessClassification (rus-Cyrl)":60.11,"KinopoiskClassification (rus-Cyrl)":56.14,"RuReviewsClassification (rus-Cyrl)":61.42,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.93,"RuSciBenchOECDClassification (rus-Cyrl)":45.83} +{"Rank":5,"Model":"USER-base<\/a>","Model Size (Million 
Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28} +{"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57} +{"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58} +{"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69} +{"Rank":9,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8} +{"Rank":10,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04} +{"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72} +{"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34} 
+{"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14} +{"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79} +{"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36} +{"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48} +{"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11} +{"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41} +{"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification 
(rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48} +{"Rank":21,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13} +{"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51} +{"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62} +{"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3} +{"Rank":25,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31} diff --git a/all_data_tasks/39/default.jsonl b/all_data_tasks/39/default.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..f653fe2da5ae1a56e6b9e7e468dbe3bc1906bb32 --- /dev/null +++ b/all_data_tasks/39/default.jsonl @@ -0,0 +1,25 @@ +{"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":61.91,"GeoreviewClusteringP2P (rus-Cyrl)":74.06,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":60.01,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":51.66} +{"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":59.98,"GeoreviewClusteringP2P (rus-Cyrl)":65.68,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":61.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":52.72} +{"Rank":3,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":53.61,"GeoreviewClusteringP2P (rus-Cyrl)":62.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":53.11,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.93} +{"Rank":4,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.42,"GeoreviewClusteringP2P 
(rus-Cyrl)":64.16,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.38,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.73} +{"Rank":5,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":53.22,"GeoreviewClusteringP2P (rus-Cyrl)":64.55,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.64,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.48} +{"Rank":6,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":52.65,"GeoreviewClusteringP2P (rus-Cyrl)":58.45,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":52.2,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":47.29} +{"Rank":7,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":52.55,"GeoreviewClusteringP2P (rus-Cyrl)":60.51,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":52.03,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":45.11} +{"Rank":8,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":52.51,"GeoreviewClusteringP2P (rus-Cyrl)":63.75,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.57,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":43.21} +{"Rank":9,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":51.94,"GeoreviewClusteringP2P (rus-Cyrl)":59.02,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.4,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":46.41} +{"Rank":10,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.65,"GeoreviewClusteringP2P (rus-Cyrl)":58.57,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.1,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":45.29} +{"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78} +{"Rank":12,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44} +{"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9} +{"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68} +{"Rank":15,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97} +{"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P 
(rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28} +{"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31} +{"Rank":19,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":41.23,"GeoreviewClusteringP2P (rus-Cyrl)":44.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":41.41,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":38.09} +{"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":38.41,"GeoreviewClusteringP2P (rus-Cyrl)":43.26,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":37.84,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":34.12} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":30.76,"GeoreviewClusteringP2P (rus-Cyrl)":34.4,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":29.89,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":27.98} +{"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":27.91,"GeoreviewClusteringP2P (rus-Cyrl)":28.77,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":28.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":26.67} +{"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":15.83,"GeoreviewClusteringP2P (rus-Cyrl)":20.33,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":14.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":12.49} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":13.87,"GeoreviewClusteringP2P (rus-Cyrl)":20.76,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":10.65,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":10.19} +{"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":13.3,"GeoreviewClusteringP2P (rus-Cyrl)":20.25,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":10.21,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":9.43} diff --git a/all_data_tasks/4/default.jsonl b/all_data_tasks/4/default.jsonl index 9f41686989de5f5ad45f1829332a93b41a97c232..a86bbed29eb95485a96711fba65f9b494475e2d3 100644 --- a/all_data_tasks/4/default.jsonl +++ b/all_data_tasks/4/default.jsonl @@ -1,196 +1,57 @@ -{"index":21,"Rank":1,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.16,"ArguAna":83.08,"ClimateFEVER":45.43,"CQADupstackRetrieval":47.31,"DBPedia":51.63,"FEVER":92.83,"FiQA2018":59.67,"HotpotQA":85.14,"MSMARCO":46.79,"NFCorpus":41.85,"NQ":73.88,"QuoraRetrieval":90.95,"SCIDOCS":25.26,"SciFact":79.09,"Touche2020":30.48,"TRECCOVID":79.08} -{"index":138,"Rank":2,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.01,"ArguAna":65.27,"ClimateFEVER":46.11,"CQADupstackRetrieval":47.75,"DBPedia":52.28,"FEVER":94.83,"FiQA2018":60.48,"HotpotQA":76.67,"MSMARCO":45.22,"NFCorpus":42.0,"NQ":71.8,"QuoraRetrieval":90.03,"SCIDOCS":26.64,"SciFact":80.09,"Touche2020":29.94,"TRECCOVID":85.98} -{"index":216,"Rank":3,"Model":"NV-Retriever-v1<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, 
fp32)":26.49,"Average":60.9,"ArguAna":68.28,"ClimateFEVER":43.47,"CQADupstackRetrieval":49.36,"DBPedia":50.82,"FEVER":93.15,"FiQA2018":61.18,"HotpotQA":79.12,"MSMARCO":44.89,"NFCorpus":45.06,"NQ":72.44,"QuoraRetrieval":88.78,"SCIDOCS":22.55,"SciFact":81.31,"Touche2020":26.6,"TRECCOVID":86.44} -{"index":126,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.25,"ArguAna":64.27,"ClimateFEVER":45.88,"CQADupstackRetrieval":46.43,"DBPedia":52.42,"FEVER":95.11,"FiQA2018":62.03,"HotpotQA":73.08,"MSMARCO":45.98,"NFCorpus":40.6,"NQ":67.0,"QuoraRetrieval":90.09,"SCIDOCS":28.91,"SciFact":79.06,"Touche2020":30.57,"TRECCOVID":82.26} -{"index":205,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.25,"ArguAna":64.27,"ClimateFEVER":45.88,"CQADupstackRetrieval":46.43,"DBPedia":52.42,"FEVER":95.11,"FiQA2018":62.03,"HotpotQA":73.08,"MSMARCO":45.98,"NFCorpus":40.6,"NQ":67.0,"QuoraRetrieval":90.09,"SCIDOCS":28.91,"SciFact":79.06,"Touche2020":30.57,"TRECCOVID":82.26} -{"index":17,"Rank":6,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":60.25,"ArguAna":64.27,"ClimateFEVER":45.88,"CQADupstackRetrieval":46.43,"DBPedia":52.42,"FEVER":95.11,"FiQA2018":62.03,"HotpotQA":73.08,"MSMARCO":45.98,"NFCorpus":40.6,"NQ":67.0,"QuoraRetrieval":90.09,"SCIDOCS":28.91,"SciFact":79.06,"Touche2020":30.57,"TRECCOVID":82.26} -{"index":58,"Rank":7,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.19,"ArguAna":69.65,"ClimateFEVER":39.11,"CQADupstackRetrieval":47.27,"DBPedia":51.32,"FEVER":92.42,"FiQA2018":61.2,"HotpotQA":76.24,"MSMARCO":45.21,"NFCorpus":41.62,"NQ":70.63,"QuoraRetrieval":90.27,"SCIDOCS":21.93,"SciFact":78.32,"Touche2020":30.61,"TRECCOVID":87.1} -{"index":95,"Rank":8,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.18,"ArguAna":62.34,"ClimateFEVER":34.43,"CQADupstackRetrieval":46.11,"DBPedia":51.21,"FEVER":92.16,"FiQA2018":61.77,"HotpotQA":81.36,"MSMARCO":42.18,"NFCorpus":41.34,"NQ":73.96,"QuoraRetrieval":89.58,"SCIDOCS":24.87,"SciFact":85.91,"Touche2020":28.18,"TRECCOVID":87.27} -{"index":215,"Rank":9,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":59.36,"ArguAna":68.2,"ClimateFEVER":34.72,"CQADupstackRetrieval":50.51,"DBPedia":48.29,"FEVER":87.77,"FiQA2018":63.1,"HotpotQA":79.92,"MSMARCO":46.49,"NFCorpus":38.04,"NQ":71.22,"QuoraRetrieval":89.21,"SCIDOCS":20.19,"SciFact":78.43,"Touche2020":28.38,"TRECCOVID":85.88} -{"index":23,"Rank":10,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.24,"ArguAna":77.37,"ClimateFEVER":39.37,"CQADupstackRetrieval":47.94,"DBPedia":51.37,"FEVER":90.38,"FiQA2018":60.04,"HotpotQA":83.26,"MSMARCO":45.71,"NFCorpus":38.11,"NQ":71.45,"QuoraRetrieval":90.04,"SCIDOCS":26.93,"SciFact":72.05,"Touche2020":30.26,"TRECCOVID":64.27} -{"index":96,"Rank":11,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, 
fp32)":26.49,"Average":59.0,"ArguAna":67.17,"ClimateFEVER":36.41,"CQADupstackRetrieval":46.49,"DBPedia":49.06,"FEVER":89.35,"FiQA2018":60.4,"HotpotQA":77.02,"MSMARCO":43.41,"NFCorpus":41.88,"NQ":69.92,"QuoraRetrieval":89.78,"SCIDOCS":19.91,"SciFact":77.66,"Touche2020":29.0,"TRECCOVID":87.6} -{"index":139,"Rank":12,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.97,"ArguAna":64.24,"ClimateFEVER":43.53,"CQADupstackRetrieval":44.36,"DBPedia":49.88,"FEVER":90.99,"FiQA2018":56.06,"HotpotQA":71.74,"MSMARCO":43.69,"NFCorpus":41.49,"NQ":69.07,"QuoraRetrieval":89.58,"SCIDOCS":25.04,"SciFact":78.23,"Touche2020":31.45,"TRECCOVID":85.21} -{"index":25,"Rank":13,"Model":"e5-R-mistral-7b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.65,"ArguAna":58.99,"ClimateFEVER":40.26,"CQADupstackRetrieval":46.59,"DBPedia":51.03,"FEVER":90.32,"FiQA2018":58.68,"HotpotQA":80.16,"MSMARCO":42.9,"NFCorpus":41.38,"NQ":69.84,"QuoraRetrieval":90.11,"SCIDOCS":19.26,"SciFact":78.92,"Touche2020":26.89,"TRECCOVID":84.4} -{"index":16,"Rank":14,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.29,"ArguAna":69.72,"ClimateFEVER":42.91,"CQADupstackRetrieval":44.76,"DBPedia":48.69,"FEVER":91.57,"FiQA2018":54.7,"HotpotQA":68.95,"MSMARCO":43.36,"NFCorpus":39.34,"NQ":64.0,"QuoraRetrieval":89.64,"SCIDOCS":24.98,"SciFact":78.44,"Touche2020":27.89,"TRECCOVID":85.38} -{"index":204,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.29,"ArguAna":69.72,"ClimateFEVER":42.91,"CQADupstackRetrieval":44.76,"DBPedia":48.69,"FEVER":91.57,"FiQA2018":54.7,"HotpotQA":68.95,"MSMARCO":43.36,"NFCorpus":39.34,"NQ":64.0,"QuoraRetrieval":89.64,"SCIDOCS":24.98,"SciFact":78.44,"Touche2020":27.89,"TRECCOVID":85.38} -{"index":6,"Rank":16,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.28,"ArguAna":64.06,"ClimateFEVER":32.65,"CQADupstackRetrieval":46.6,"DBPedia":46.03,"FEVER":91.47,"FiQA2018":59.76,"HotpotQA":70.86,"MSMARCO":40.6,"NFCorpus":40.32,"NQ":65.92,"QuoraRetrieval":87.4,"SCIDOCS":24.32,"SciFact":79.99,"Touche2020":39.16,"TRECCOVID":85.07} -{"index":51,"Rank":17,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.12,"ArguAna":67.21,"ClimateFEVER":32.3,"CQADupstackRetrieval":49.11,"DBPedia":48.05,"FEVER":89.46,"FiQA2018":58.94,"HotpotQA":78.87,"MSMARCO":42.0,"NFCorpus":42.6,"NQ":68.36,"QuoraRetrieval":89.02,"SCIDOCS":27.69,"SciFact":78.82,"Touche2020":24.06,"TRECCOVID":75.33} -{"index":19,"Rank":18,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.91,"ArguAna":72.11,"ClimateFEVER":48.36,"CQADupstackRetrieval":42.16,"DBPedia":46.3,"FEVER":93.81,"FiQA2018":63.23,"HotpotQA":68.18,"MSMARCO":42.93,"NFCorpus":36.95,"NQ":56.08,"QuoraRetrieval":89.67,"SCIDOCS":26.35,"SciFact":82.43,"Touche2020":22.55,"TRECCOVID":77.49} -{"index":42,"Rank":19,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":57.36,"ArguAna":63.17,"ClimateFEVER":30.91,"CQADupstackRetrieval":49.42,"DBPedia":46.6,"FEVER":82.74,"FiQA2018":59.91,"HotpotQA":79.4,"MSMARCO":41.96,"NFCorpus":40.86,"NQ":70.3,"QuoraRetrieval":89.47,"SCIDOCS":24.4,"SciFact":79.13,"Touche2020":27.81,"TRECCOVID":74.36} 
-{"index":219,"Rank":20,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.05,"ArguAna":58.82,"ClimateFEVER":32.47,"CQADupstackRetrieval":49.52,"DBPedia":48.99,"FEVER":90.88,"FiQA2018":53.22,"HotpotQA":77.7,"MSMARCO":40.66,"NFCorpus":41.33,"NQ":64.67,"QuoraRetrieval":88.86,"SCIDOCS":23.01,"SciFact":79.62,"Touche2020":25.24,"TRECCOVID":80.75} -{"index":156,"Rank":21,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":56.89,"ArguAna":61.88,"ClimateFEVER":38.35,"CQADupstackRetrieval":42.97,"DBPedia":48.89,"FEVER":87.84,"FiQA2018":56.59,"HotpotQA":75.72,"MSMARCO":43.06,"NFCorpus":38.62,"NQ":63.53,"QuoraRetrieval":89.61,"SCIDOCS":16.3,"SciFact":76.41,"Touche2020":26.39,"TRECCOVID":87.25} -{"index":62,"Rank":22,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":56.63,"ArguAna":62.78,"ClimateFEVER":34.27,"CQADupstackRetrieval":48.25,"DBPedia":48.34,"FEVER":90.2,"FiQA2018":55.33,"HotpotQA":71.76,"MSMARCO":43.24,"NFCorpus":41.83,"NQ":64.21,"QuoraRetrieval":87.16,"SCIDOCS":22.96,"SciFact":78.22,"Touche2020":20.5,"TRECCOVID":80.34} -{"index":9,"Rank":23,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":56.6,"ArguAna":70.28,"ClimateFEVER":31.95,"CQADupstackRetrieval":46.2,"DBPedia":39.79,"FEVER":91.35,"FiQA2018":52.51,"HotpotQA":75.51,"MSMARCO":37.93,"NFCorpus":43.7,"NQ":64.26,"QuoraRetrieval":87.62,"SCIDOCS":20.24,"SciFact":79.91,"Touche2020":26.8,"TRECCOVID":81.02} -{"index":15,"Rank":24,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":56.24,"ArguAna":62.65,"ClimateFEVER":44.0,"CQADupstackRetrieval":40.64,"DBPedia":48.04,"FEVER":93.35,"FiQA2018":55.31,"HotpotQA":72.25,"MSMARCO":41.68,"NFCorpus":38.25,"NQ":61.79,"QuoraRetrieval":89.61,"SCIDOCS":27.69,"SciFact":75.31,"Touche2020":20.3,"TRECCOVID":72.72} -{"index":64,"Rank":25,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":55.99,"ArguAna":57.48,"ClimateFEVER":35.19,"CQADupstackRetrieval":48.84,"DBPedia":49.58,"FEVER":89.4,"FiQA2018":53.11,"HotpotQA":74.07,"MSMARCO":42.17,"NFCorpus":39.33,"NQ":61.7,"QuoraRetrieval":87.75,"SCIDOCS":22.5,"SciFact":78.86,"Touche2020":22.18,"TRECCOVID":77.69} -{"index":99,"Rank":26,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.98,"ArguAna":59.09,"ClimateFEVER":39.33,"CQADupstackRetrieval":46.97,"DBPedia":45.97,"FEVER":88.22,"FiQA2018":44.71,"HotpotQA":75.18,"MSMARCO":41.68,"NFCorpus":37.65,"NQ":63.11,"QuoraRetrieval":87.41,"SCIDOCS":21.36,"SciFact":73.82,"Touche2020":34.52,"TRECCOVID":80.72} -{"index":121,"Rank":27,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.98,"ArguAna":59.09,"ClimateFEVER":39.33,"CQADupstackRetrieval":46.97,"DBPedia":45.97,"FEVER":88.22,"FiQA2018":44.71,"HotpotQA":75.18,"MSMARCO":41.68,"NFCorpus":37.65,"NQ":63.11,"QuoraRetrieval":87.41,"SCIDOCS":21.36,"SciFact":73.82,"Touche2020":34.52,"TRECCOVID":80.72} -{"index":1,"Rank":28,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, 
fp32)":4.47,"Average":55.7,"ArguAna":62.18,"ClimateFEVER":33.21,"CQADupstackRetrieval":48.89,"DBPedia":47.12,"FEVER":86.96,"FiQA2018":59.24,"HotpotQA":71.33,"MSMARCO":32.58,"NFCorpus":40.33,"NQ":61.28,"QuoraRetrieval":88.18,"SCIDOCS":20.34,"SciFact":75.42,"Touche2020":25.86,"TRECCOVID":82.62} -{"index":8,"Rank":29,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.58,"ArguAna":58.73,"ClimateFEVER":37.47,"CQADupstackRetrieval":45.11,"DBPedia":43.42,"FEVER":89.71,"FiQA2018":44.79,"HotpotQA":70.46,"MSMARCO":39.66,"NFCorpus":43.33,"NQ":60.65,"QuoraRetrieval":87.83,"SCIDOCS":23.19,"SciFact":73.64,"Touche2020":36.83,"TRECCOVID":78.92} -{"index":178,"Rank":30,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.52,"ArguAna":58.52,"ClimateFEVER":34.56,"CQADupstackRetrieval":46.91,"DBPedia":46.83,"FEVER":91.22,"FiQA2018":54.51,"HotpotQA":76.41,"MSMARCO":43.25,"NFCorpus":39.55,"NQ":62.31,"QuoraRetrieval":89.34,"SCIDOCS":20.17,"SciFact":73.99,"Touche2020":18.52,"TRECCOVID":76.66} -{"index":283,"Rank":31,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.44,"ArguAna":58.05,"ClimateFEVER":30.27,"CQADupstackRetrieval":47.54,"DBPedia":44.76,"FEVER":87.94,"FiQA2018":55.0,"HotpotQA":71.58,"MSMARCO":40.24,"NFCorpus":42.07,"NQ":61.27,"QuoraRetrieval":89.05,"SCIDOCS":23.11,"SciFact":77.77,"Touche2020":23.35,"TRECCOVID":79.56} -{"index":43,"Rank":32,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.09,"ArguAna":59.49,"ClimateFEVER":28.69,"CQADupstackRetrieval":47.63,"DBPedia":46.54,"FEVER":85.02,"FiQA2018":49.89,"HotpotQA":73.83,"MSMARCO":35.55,"NFCorpus":39.05,"NQ":63.87,"QuoraRetrieval":87.7,"SCIDOCS":23.06,"SciFact":77.02,"Touche2020":27.97,"TRECCOVID":81.07} -{"index":34,"Rank":33,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.0,"ArguAna":61.52,"ClimateFEVER":38.43,"CQADupstackRetrieval":41.53,"DBPedia":43.36,"FEVER":88.97,"FiQA2018":42.19,"HotpotQA":70.72,"MSMARCO":42.93,"NFCorpus":38.57,"NQ":61.56,"QuoraRetrieval":88.72,"SCIDOCS":20.31,"SciFact":71.83,"Touche2020":32.42,"TRECCOVID":81.92} -{"index":100,"Rank":34,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.91,"ArguAna":56.44,"ClimateFEVER":39.37,"CQADupstackRetrieval":43.81,"DBPedia":44.73,"FEVER":89.02,"FiQA2018":42.4,"HotpotQA":73.65,"MSMARCO":41.77,"NFCorpus":36.77,"NQ":62.43,"QuoraRetrieval":87.42,"SCIDOCS":21.1,"SciFact":73.55,"Touche2020":31.47,"TRECCOVID":79.65} -{"index":101,"Rank":35,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.83,"ArguAna":60.36,"ClimateFEVER":38.33,"CQADupstackRetrieval":44.21,"DBPedia":45.29,"FEVER":86.89,"FiQA2018":42.16,"HotpotQA":72.76,"MSMARCO":41.97,"NFCorpus":35.87,"NQ":62.34,"QuoraRetrieval":87.72,"SCIDOCS":20.47,"SciFact":69.96,"Touche2020":32.35,"TRECCOVID":81.7} -{"index":108,"Rank":36,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":54.66,"ArguAna":66.15,"ClimateFEVER":38.53,"CQADupstackRetrieval":40.93,"DBPedia":44.89,"FEVER":88.24,"FiQA2018":44.84,"HotpotQA":73.13,"MSMARCO":41.4,"NFCorpus":38.65,"NQ":55.86,"QuoraRetrieval":89.02,"SCIDOCS":22.98,"SciFact":74.07,"Touche2020":24.93,"TRECCOVID":76.33} -{"index":60,"Rank":37,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":54.6,"ArguAna":56.53,"ClimateFEVER":30.7,"CQADupstackRetrieval":45.94,"DBPedia":48.42,"FEVER":89.93,"FiQA2018":51.28,"HotpotQA":72.99,"MSMARCO":41.46,"NFCorpus":40.33,"NQ":61.24,"QuoraRetrieval":85.59,"SCIDOCS":21.05,"SciFact":77.3,"Touche2020":16.92,"TRECCOVID":79.25} -{"index":194,"Rank":38,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.39,"ArguAna":66.02,"ClimateFEVER":36.09,"CQADupstackRetrieval":41.6,"DBPedia":44.51,"FEVER":86.91,"FiQA2018":45.27,"HotpotQA":72.03,"MSMARCO":41.26,"NFCorpus":38.64,"NQ":55.79,"QuoraRetrieval":88.98,"SCIDOCS":23.32,"SciFact":74.73,"Touche2020":25.2,"TRECCOVID":75.57} -{"index":22,"Rank":39,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":54.29,"ArguAna":63.54,"ClimateFEVER":36.57,"CQADupstackRetrieval":42.23,"DBPedia":44.11,"FEVER":87.18,"FiQA2018":45.02,"HotpotQA":74.1,"MSMARCO":42.49,"NFCorpus":38.13,"NQ":55.03,"QuoraRetrieval":89.07,"SCIDOCS":22.64,"SciFact":74.61,"Touche2020":24.81,"TRECCOVID":74.82} -{"index":18,"Rank":40,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.09,"ArguAna":63.49,"ClimateFEVER":40.36,"CQADupstackRetrieval":39.52,"DBPedia":39.9,"FEVER":94.81,"FiQA2018":48.65,"HotpotQA":67.75,"MSMARCO":42.62,"NFCorpus":35.88,"NQ":52.96,"QuoraRetrieval":88.42,"SCIDOCS":21.92,"SciFact":76.77,"Touche2020":25.22,"TRECCOVID":73.13} -{"index":36,"Rank":41,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.84,"ArguAna":55.11,"ClimateFEVER":29.96,"CQADupstackRetrieval":40.64,"DBPedia":41.0,"FEVER":88.53,"FiQA2018":44.1,"HotpotQA":70.61,"MSMARCO":43.45,"NFCorpus":36.42,"NQ":63.41,"QuoraRetrieval":88.92,"SCIDOCS":19.34,"SciFact":70.05,"Touche2020":32.7,"TRECCOVID":83.37} -{"index":53,"Rank":42,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.46,"ArguAna":66.18,"ClimateFEVER":33.13,"CQADupstackRetrieval":42.74,"DBPedia":42.84,"FEVER":85.44,"FiQA2018":44.93,"HotpotQA":72.49,"MSMARCO":41.46,"NFCorpus":37.9,"NQ":55.12,"QuoraRetrieval":89.07,"SCIDOCS":23.69,"SciFact":75.14,"Touche2020":24.42,"TRECCOVID":67.33} -{"index":117,"Rank":43,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.44,"ArguAna":63.38,"ClimateFEVER":33.99,"CQADupstackRetrieval":43.44,"DBPedia":42.96,"FEVER":86.55,"FiQA2018":44.3,"HotpotQA":70.46,"MSMARCO":41.39,"NFCorpus":38.65,"NQ":56.09,"QuoraRetrieval":88.98,"SCIDOCS":24.06,"SciFact":74.72,"Touche2020":23.45,"TRECCOVID":69.13} -{"index":261,"Rank":44,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.3,"ArguAna":65.99,"ClimateFEVER":30.37,"CQADupstackRetrieval":43.39,"DBPedia":43.31,"FEVER":83.58,"FiQA2018":45.01,"HotpotQA":71.26,"MSMARCO":41.63,"NFCorpus":39.13,"NQ":55.13,"QuoraRetrieval":89.12,"SCIDOCS":23.79,"SciFact":75.25,"Touche2020":23.11,"TRECCOVID":69.38} 
-{"index":197,"Rank":45,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.3,"ArguAna":65.99,"ClimateFEVER":30.37,"CQADupstackRetrieval":43.39,"DBPedia":43.31,"FEVER":83.58,"FiQA2018":45.01,"HotpotQA":71.26,"MSMARCO":41.63,"NFCorpus":39.13,"NQ":55.13,"QuoraRetrieval":89.12,"SCIDOCS":23.79,"SciFact":75.25,"Touche2020":23.11,"TRECCOVID":69.38} -{"index":20,"Rank":46,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"index":179,"Rank":47,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"index":181,"Rank":48,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"index":180,"Rank":49,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"index":182,"Rank":50,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"index":213,"Rank":51,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.01,"ArguAna":48.01,"ClimateFEVER":41.28,"CQADupstackRetrieval":39.61,"DBPedia":43.9,"FEVER":86.34,"FiQA2018":37.46,"HotpotQA":72.62,"MSMARCO":42.53,"NFCorpus":34.67,"NQ":59.72,"QuoraRetrieval":88.0,"SCIDOCS":18.62,"SciFact":70.28,"Touche2020":29.86,"TRECCOVID":82.3} -{"index":29,"Rank":52,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"index":26,"Rank":53,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"index":27,"Rank":54,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"index":129,"Rank":55,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"index":28,"Rank":56,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"index":206,"Rank":57,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"index":161,"Rank":58,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.47,"ArguAna":58.38,"ClimateFEVER":29.86,"CQADupstackRetrieval":42.71,"DBPedia":38.36,"FEVER":77.99,"FiQA2018":47.71,"HotpotQA":69.32,"MSMARCO":40.43,"NFCorpus":35.53,"NQ":57.75,"QuoraRetrieval":89.15,"SCIDOCS":18.72,"SciFact":71.85,"Touche2020":27.25,"TRECCOVID":82.0} -{"index":0,"Rank":59,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":52.44,"ArguAna":56.27,"ClimateFEVER":29.35,"CQADupstackRetrieval":45.41,"DBPedia":41.91,"FEVER":82.61,"FiQA2018":55.54,"HotpotQA":64.65,"MSMARCO":31.12,"NFCorpus":37.81,"NQ":57.37,"QuoraRetrieval":87.89,"SCIDOCS":18.21,"SciFact":70.86,"Touche2020":27.4,"TRECCOVID":80.13} -{"index":211,"Rank":60,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":52.4,"ArguAna":47.45,"ClimateFEVER":40.7,"CQADupstackRetrieval":39.06,"DBPedia":42.96,"FEVER":85.7,"FiQA2018":36.92,"HotpotQA":71.48,"MSMARCO":42.29,"NFCorpus":33.31,"NQ":58.83,"QuoraRetrieval":87.87,"SCIDOCS":17.88,"SciFact":70.12,"Touche2020":29.24,"TRECCOVID":82.12} -{"index":115,"Rank":61,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":52.31,"ArguAna":62.62,"ClimateFEVER":31.49,"CQADupstackRetrieval":43.2,"DBPedia":41.7,"FEVER":86.65,"FiQA2018":40.64,"HotpotQA":68.92,"MSMARCO":40.64,"NFCorpus":37.64,"NQ":53.43,"QuoraRetrieval":88.81,"SCIDOCS":23.47,"SciFact":75.29,"Touche2020":20.58,"TRECCOVID":69.6} -{"index":253,"Rank":62,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.22,"ArguAna":57.16,"ClimateFEVER":28.82,"CQADupstackRetrieval":43.18,"DBPedia":42.37,"FEVER":84.53,"FiQA2018":44.5,"HotpotQA":67.16,"MSMARCO":40.86,"NFCorpus":38.17,"NQ":54.78,"QuoraRetrieval":88.32,"SCIDOCS":23.44,"SciFact":74.27,"Touche2020":25.51,"TRECCOVID":70.22} -{"index":125,"Rank":63,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.99,"ArguAna":57.59,"ClimateFEVER":35.2,"CQADupstackRetrieval":39.65,"DBPedia":41.02,"FEVER":87.13,"FiQA2018":40.65,"HotpotQA":66.54,"MSMARCO":40.23,"NFCorpus":34.92,"NQ":50.9,"QuoraRetrieval":88.41,"SCIDOCS":21.82,"SciFact":72.22,"Touche2020":23.48,"TRECCOVID":80.12} -{"index":119,"Rank":64,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.99,"ArguAna":57.59,"ClimateFEVER":35.2,"CQADupstackRetrieval":39.65,"DBPedia":41.02,"FEVER":87.13,"FiQA2018":40.65,"HotpotQA":66.54,"MSMARCO":40.23,"NFCorpus":34.92,"NQ":50.9,"QuoraRetrieval":88.41,"SCIDOCS":21.82,"SciFact":72.22,"Touche2020":23.48,"TRECCOVID":80.12} -{"index":103,"Rank":65,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.98,"ArguAna":56.87,"ClimateFEVER":31.25,"CQADupstackRetrieval":42.02,"DBPedia":41.59,"FEVER":82.49,"FiQA2018":39.68,"HotpotQA":66.59,"MSMARCO":39.79,"NFCorpus":32.54,"NQ":56.19,"QuoraRetrieval":87.47,"SCIDOCS":19.42,"SciFact":69.92,"Touche2020":32.51,"TRECCOVID":81.39} -{"index":186,"Rank":66,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.92,"ArguAna":64.56,"ClimateFEVER":27.29,"CQADupstackRetrieval":42.39,"DBPedia":41.79,"FEVER":83.69,"FiQA2018":44.3,"HotpotQA":74.33,"MSMARCO":42.03,"NFCorpus":36.91,"NQ":51.77,"QuoraRetrieval":89.09,"SCIDOCS":19.58,"SciFact":73.42,"Touche2020":23.54,"TRECCOVID":64.14} -{"index":170,"Rank":67,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.8,"ArguAna":64.07,"ClimateFEVER":28.74,"CQADupstackRetrieval":41.14,"DBPedia":42.51,"FEVER":81.38,"FiQA2018":42.64,"HotpotQA":72.37,"MSMARCO":40.66,"NFCorpus":38.43,"NQ":51.55,"QuoraRetrieval":88.74,"SCIDOCS":20.16,"SciFact":75.41,"Touche2020":22.6,"TRECCOVID":66.57} -{"index":24,"Rank":68,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":51.68,"ArguAna":59.55,"ClimateFEVER":31.84,"CQADupstackRetrieval":39.05,"DBPedia":40.03,"FEVER":86.64,"FiQA2018":40.34,"HotpotQA":69.94,"MSMARCO":40.83,"NFCorpus":34.3,"NQ":50.18,"QuoraRetrieval":88.78,"SCIDOCS":20.52,"SciFact":71.28,"Touche2020":26.04,"TRECCOVID":75.9} -{"index":282,"Rank":69,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.66,"ArguAna":55.6,"ClimateFEVER":25.8,"CQADupstackRetrieval":42.28,"DBPedia":40.8,"FEVER":84.57,"FiQA2018":50.33,"HotpotQA":62.69,"MSMARCO":37.93,"NFCorpus":37.94,"NQ":56.64,"QuoraRetrieval":88.22,"SCIDOCS":20.44,"SciFact":73.1,"Touche2020":22.31,"TRECCOVID":76.24} 
-{"index":66,"Rank":70,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":51.44,"ArguAna":51.66,"ClimateFEVER":33.49,"CQADupstackRetrieval":41.73,"DBPedia":43.58,"FEVER":86.81,"FiQA2018":41.0,"HotpotQA":63.85,"MSMARCO":38.32,"NFCorpus":37.12,"NQ":53.89,"QuoraRetrieval":87.37,"SCIDOCS":17.96,"SciFact":72.08,"Touche2020":22.31,"TRECCOVID":80.41} -{"index":207,"Rank":71,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.43,"ArguAna":54.66,"ClimateFEVER":27.01,"CQADupstackRetrieval":38.47,"DBPedia":42.07,"FEVER":77.32,"FiQA2018":39.02,"HotpotQA":64.12,"MSMARCO":43.33,"NFCorpus":35.31,"NQ":61.43,"QuoraRetrieval":88.33,"SCIDOCS":18.7,"SciFact":71.51,"Touche2020":27.86,"TRECCOVID":82.31} -{"index":160,"Rank":72,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":51.43,"ArguAna":54.38,"ClimateFEVER":25.73,"CQADupstackRetrieval":39.68,"DBPedia":41.29,"FEVER":82.81,"FiQA2018":43.8,"HotpotQA":71.23,"MSMARCO":43.7,"NFCorpus":33.99,"NQ":64.06,"QuoraRetrieval":88.18,"SCIDOCS":17.47,"SciFact":70.41,"Touche2020":23.39,"TRECCOVID":71.33} -{"index":193,"Rank":73,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.42,"ArguAna":63.63,"ClimateFEVER":30.73,"CQADupstackRetrieval":39.46,"DBPedia":40.74,"FEVER":82.24,"FiQA2018":41.75,"HotpotQA":63.0,"MSMARCO":38.03,"NFCorpus":37.35,"NQ":54.84,"QuoraRetrieval":88.14,"SCIDOCS":22.78,"SciFact":74.12,"Touche2020":25.89,"TRECCOVID":68.64} -{"index":33,"Rank":74,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.34,"ArguAna":50.81,"ClimateFEVER":28.9,"CQADupstackRetrieval":38.89,"DBPedia":41.03,"FEVER":87.64,"FiQA2018":38.83,"HotpotQA":66.79,"MSMARCO":41.33,"NFCorpus":33.65,"NQ":57.99,"QuoraRetrieval":88.11,"SCIDOCS":18.1,"SciFact":66.69,"Touche2020":31.93,"TRECCOVID":79.36} -{"index":112,"Rank":75,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.31,"ArguAna":51.31,"ClimateFEVER":22.76,"CQADupstackRetrieval":45.27,"DBPedia":36.95,"FEVER":88.17,"FiQA2018":44.8,"HotpotQA":64.15,"MSMARCO":38.0,"NFCorpus":33.94,"NQ":55.1,"QuoraRetrieval":88.41,"SCIDOCS":21.98,"SciFact":70.52,"Touche2020":27.48,"TRECCOVID":80.75} -{"index":252,"Rank":76,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.14,"ArguAna":57.12,"ClimateFEVER":28.1,"CQADupstackRetrieval":42.91,"DBPedia":41.19,"FEVER":81.52,"FiQA2018":40.76,"HotpotQA":65.75,"MSMARCO":40.21,"NFCorpus":37.9,"NQ":52.84,"QuoraRetrieval":88.15,"SCIDOCS":23.13,"SciFact":76.18,"Touche2020":22.55,"TRECCOVID":68.78} -{"index":284,"Rank":77,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.08,"ArguAna":55.49,"ClimateFEVER":26.86,"CQADupstackRetrieval":42.58,"DBPedia":39.97,"FEVER":79.42,"FiQA2018":44.91,"HotpotQA":63.63,"MSMARCO":37.02,"NFCorpus":38.33,"NQ":52.86,"QuoraRetrieval":88.83,"SCIDOCS":20.8,"SciFact":73.37,"Touche2020":24.28,"TRECCOVID":77.9} -{"index":210,"Rank":78,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":50.81,"ArguAna":45.44,"ClimateFEVER":39.63,"CQADupstackRetrieval":37.61,"DBPedia":39.42,"FEVER":84.4,"FiQA2018":35.0,"HotpotQA":67.78,"MSMARCO":41.38,"NFCorpus":32.54,"NQ":57.1,"QuoraRetrieval":87.65,"SCIDOCS":16.76,"SciFact":68.24,"Touche2020":28.49,"TRECCOVID":80.65} -{"index":250,"Rank":79,"Model":"ret-phi2-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.78,"ArguAna":48.27,"ClimateFEVER":29.61,"CQADupstackRetrieval":35.76,"DBPedia":40.51,"FEVER":80.14,"FiQA2018":41.08,"HotpotQA":63.19,"MSMARCO":42.72,"NFCorpus":37.82,"NQ":56.39,"QuoraRetrieval":88.1,"SCIDOCS":17.48,"SciFact":71.63,"Touche2020":25.66,"TRECCOVID":83.4} -{"index":155,"Rank":80,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.56,"ArguAna":46.42,"ClimateFEVER":22.21,"CQADupstackRetrieval":37.89,"DBPedia":44.02,"FEVER":82.83,"FiQA2018":41.14,"HotpotQA":73.13,"MSMARCO":43.46,"NFCorpus":37.13,"NQ":63.44,"QuoraRetrieval":86.84,"SCIDOCS":20.51,"SciFact":72.24,"Touche2020":20.67,"TRECCOVID":66.54} -{"index":118,"Rank":81,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.43,"ArguAna":59.12,"ClimateFEVER":31.83,"CQADupstackRetrieval":39.89,"DBPedia":39.76,"FEVER":86.92,"FiQA2018":39.15,"HotpotQA":65.2,"MSMARCO":39.73,"NFCorpus":34.69,"NQ":48.69,"QuoraRetrieval":88.43,"SCIDOCS":21.89,"SciFact":70.86,"Touche2020":21.19,"TRECCOVID":69.14} -{"index":140,"Rank":82,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.29,"ArguAna":44.49,"ClimateFEVER":26.56,"CQADupstackRetrieval":38.54,"DBPedia":42.23,"FEVER":84.99,"FiQA2018":39.88,"HotpotQA":69.15,"MSMARCO":41.77,"NFCorpus":35.39,"NQ":58.22,"QuoraRetrieval":86.56,"SCIDOCS":18.69,"SciFact":71.94,"Touche2020":26.4,"TRECCOVID":69.6} -{"index":153,"Rank":83,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.29,"ArguAna":44.49,"ClimateFEVER":26.56,"CQADupstackRetrieval":38.54,"DBPedia":42.23,"FEVER":84.99,"FiQA2018":39.88,"HotpotQA":69.15,"MSMARCO":41.77,"NFCorpus":35.39,"NQ":58.22,"QuoraRetrieval":86.56,"SCIDOCS":18.69,"SciFact":71.94,"Touche2020":26.4,"TRECCOVID":69.6} -{"index":83,"Rank":84,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.25,"ArguAna":51.38,"ClimateFEVER":30.46,"CQADupstackRetrieval":39.4,"DBPedia":39.87,"FEVER":78.24,"FiQA2018":37.2,"HotpotQA":59.26,"MSMARCO":39.91,"NFCorpus":36.21,"NQ":52.41,"QuoraRetrieval":84.58,"SCIDOCS":19.87,"SciFact":74.7,"Touche2020":25.43,"TRECCOVID":84.88} -{"index":35,"Rank":85,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.15,"ArguAna":53.37,"ClimateFEVER":24.8,"CQADupstackRetrieval":38.09,"DBPedia":38.05,"FEVER":85.52,"FiQA2018":36.16,"HotpotQA":66.88,"MSMARCO":40.49,"NFCorpus":32.12,"NQ":55.51,"QuoraRetrieval":87.85,"SCIDOCS":17.1,"SciFact":67.95,"Touche2020":29.48,"TRECCOVID":78.93} -{"index":104,"Rank":86,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":50.15,"ArguAna":52.08,"ClimateFEVER":29.88,"CQADupstackRetrieval":40.12,"DBPedia":40.2,"FEVER":83.4,"FiQA2018":34.52,"HotpotQA":65.25,"MSMARCO":39.43,"NFCorpus":30.89,"NQ":54.76,"QuoraRetrieval":86.57,"SCIDOCS":18.36,"SciFact":64.51,"Touche2020":32.79,"TRECCOVID":79.43} -{"index":151,"Rank":87,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.1,"ArguAna":60.63,"ClimateFEVER":29.0,"CQADupstackRetrieval":41.14,"DBPedia":39.64,"FEVER":79.13,"FiQA2018":38.62,"HotpotQA":68.22,"MSMARCO":40.95,"NFCorpus":37.51,"NQ":50.2,"QuoraRetrieval":88.72,"SCIDOCS":18.58,"SciFact":72.51,"Touche2020":21.9,"TRECCOVID":64.79} -{"index":285,"Rank":88,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.02,"ArguAna":55.98,"ClimateFEVER":27.08,"CQADupstackRetrieval":34.27,"DBPedia":42.7,"FEVER":78.55,"FiQA2018":41.57,"HotpotQA":67.01,"MSMARCO":38.9,"NFCorpus":36.66,"NQ":55.84,"QuoraRetrieval":84.69,"SCIDOCS":16.24,"SciFact":71.8,"Touche2020":26.27,"TRECCOVID":72.72} -{"index":154,"Rank":89,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":49.99,"ArguAna":49.35,"ClimateFEVER":22.4,"CQADupstackRetrieval":39.44,"DBPedia":42.39,"FEVER":65.03,"FiQA2018":38.56,"HotpotQA":63.33,"MSMARCO":44.05,"NFCorpus":36.07,"NQ":62.86,"QuoraRetrieval":88.18,"SCIDOCS":20.12,"SciFact":72.58,"Touche2020":27.21,"TRECCOVID":78.32} -{"index":254,"Rank":90,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.46,"ArguAna":55.44,"ClimateFEVER":26.54,"CQADupstackRetrieval":39.98,"DBPedia":39.1,"FEVER":81.55,"FiQA2018":39.35,"HotpotQA":63.79,"MSMARCO":40.31,"NFCorpus":34.77,"NQ":50.29,"QuoraRetrieval":88.02,"SCIDOCS":21.38,"SciFact":72.7,"Touche2020":22.22,"TRECCOVID":66.53} -{"index":169,"Rank":91,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.34,"ArguAna":52.21,"ClimateFEVER":26.79,"CQADupstackRetrieval":39.29,"DBPedia":37.62,"FEVER":73.98,"FiQA2018":36.7,"HotpotQA":56.66,"MSMARCO":42.02,"NFCorpus":36.02,"NQ":53.31,"QuoraRetrieval":88.28,"SCIDOCS":18.94,"SciFact":69.29,"Touche2020":25.19,"TRECCOVID":83.82} -{"index":149,"Rank":92,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":49.26,"ArguAna":55.65,"ClimateFEVER":26.54,"CQADupstackRetrieval":43.09,"DBPedia":40.24,"FEVER":70.03,"FiQA2018":46.96,"HotpotQA":55.88,"MSMARCO":41.61,"NFCorpus":36.0,"NQ":57.24,"QuoraRetrieval":88.85,"SCIDOCS":17.36,"SciFact":64.56,"Touche2020":23.44,"TRECCOVID":71.4} -{"index":281,"Rank":93,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.25,"ArguAna":57.44,"ClimateFEVER":21.64,"CQADupstackRetrieval":41.69,"DBPedia":39.39,"FEVER":74.99,"FiQA2018":44.41,"HotpotQA":60.9,"MSMARCO":40.91,"NFCorpus":36.97,"NQ":51.58,"QuoraRetrieval":87.6,"SCIDOCS":18.36,"SciFact":72.75,"Touche2020":21.61,"TRECCOVID":68.47} -{"index":158,"Rank":94,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.04,"ArguAna":41.67,"ClimateFEVER":22.87,"CQADupstackRetrieval":37.07,"DBPedia":41.32,"FEVER":81.64,"FiQA2018":37.43,"HotpotQA":66.61,"MSMARCO":41.46,"NFCorpus":32.45,"NQ":59.11,"QuoraRetrieval":85.71,"SCIDOCS":17.77,"SciFact":68.85,"Touche2020":27.12,"TRECCOVID":74.53} 
-{"index":159,"Rank":95,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":48.88,"ArguAna":44.23,"ClimateFEVER":23.86,"CQADupstackRetrieval":38.52,"DBPedia":40.36,"FEVER":79.44,"FiQA2018":38.17,"HotpotQA":68.56,"MSMARCO":42.27,"NFCorpus":32.46,"NQ":60.02,"QuoraRetrieval":87.65,"SCIDOCS":17.16,"SciFact":69.35,"Touche2020":21.35,"TRECCOVID":69.76} -{"index":152,"Rank":96,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.75,"ArguAna":51.41,"ClimateFEVER":15.38,"CQADupstackRetrieval":38.92,"DBPedia":41.02,"FEVER":58.24,"FiQA2018":36.37,"HotpotQA":62.21,"MSMARCO":43.14,"NFCorpus":36.59,"NQ":59.97,"QuoraRetrieval":87.92,"SCIDOCS":18.99,"SciFact":73.08,"Touche2020":28.31,"TRECCOVID":79.64} -{"index":238,"Rank":97,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":48.48,"ArguAna":53.77,"ClimateFEVER":27.21,"CQADupstackRetrieval":38.56,"DBPedia":41.28,"FEVER":74.08,"FiQA2018":46.78,"HotpotQA":59.67,"MSMARCO":44.05,"NFCorpus":34.18,"NQ":57.24,"QuoraRetrieval":89.09,"SCIDOCS":15.88,"SciFact":66.77,"Touche2020":26.76,"TRECCOVID":51.9} -{"index":123,"Rank":98,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.22,"ArguAna":47.28,"ClimateFEVER":29.39,"CQADupstackRetrieval":39.62,"DBPedia":39.03,"FEVER":73.97,"FiQA2018":35.84,"HotpotQA":57.26,"MSMARCO":41.12,"NFCorpus":35.78,"NQ":53.15,"QuoraRetrieval":74.71,"SCIDOCS":18.62,"SciFact":72.11,"Touche2020":23.98,"TRECCOVID":81.37} -{"index":208,"Rank":99,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.0,"ArguAna":54.81,"ClimateFEVER":24.71,"CQADupstackRetrieval":41.4,"DBPedia":40.2,"FEVER":74.39,"FiQA2018":39.86,"HotpotQA":63.7,"MSMARCO":34.99,"NFCorpus":35.68,"NQ":48.55,"QuoraRetrieval":88.19,"SCIDOCS":20.17,"SciFact":71.98,"Touche2020":19.17,"TRECCOVID":62.2} -{"index":237,"Rank":100,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":47.96,"ArguAna":52.81,"ClimateFEVER":27.01,"CQADupstackRetrieval":37.35,"DBPedia":39.74,"FEVER":72.18,"FiQA2018":44.19,"HotpotQA":58.91,"MSMARCO":43.52,"NFCorpus":33.34,"NQ":56.16,"QuoraRetrieval":88.91,"SCIDOCS":15.71,"SciFact":64.2,"Touche2020":25.26,"TRECCOVID":60.09} -{"index":175,"Rank":101,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":47.87,"ArguAna":44.18,"ClimateFEVER":23.53,"CQADupstackRetrieval":39.34,"DBPedia":35.05,"FEVER":72.33,"FiQA2018":41.58,"HotpotQA":61.38,"MSMARCO":40.92,"NFCorpus":32.45,"NQ":60.44,"QuoraRetrieval":88.2,"SCIDOCS":19.86,"SciFact":66.68,"Touche2020":26.24,"TRECCOVID":65.91} -{"index":209,"Rank":102,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":47.75,"ArguAna":43.4,"ClimateFEVER":36.52,"CQADupstackRetrieval":34.67,"DBPedia":36.22,"FEVER":80.48,"FiQA2018":32.08,"HotpotQA":60.09,"MSMARCO":39.99,"NFCorpus":30.72,"NQ":53.62,"QuoraRetrieval":87.07,"SCIDOCS":15.56,"SciFact":64.28,"Touche2020":26.99,"TRECCOVID":74.58} -{"index":167,"Rank":103,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":47.67,"ArguAna":48.33,"ClimateFEVER":23.36,"CQADupstackRetrieval":37.82,"DBPedia":34.54,"FEVER":71.96,"FiQA2018":35.12,"HotpotQA":55.12,"MSMARCO":40.25,"NFCorpus":33.66,"NQ":50.62,"QuoraRetrieval":88.01,"SCIDOCS":18.5,"SciFact":69.43,"Touche2020":25.17,"TRECCOVID":83.21} -{"index":148,"Rank":104,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":47.57,"ArguAna":57.05,"ClimateFEVER":27.74,"CQADupstackRetrieval":43.82,"DBPedia":36.68,"FEVER":72.69,"FiQA2018":45.45,"HotpotQA":55.18,"MSMARCO":39.65,"NFCorpus":34.09,"NQ":50.1,"QuoraRetrieval":88.44,"SCIDOCS":18.55,"SciFact":64.43,"Touche2020":21.56,"TRECCOVID":58.06} -{"index":236,"Rank":105,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":47.42,"ArguAna":52.09,"ClimateFEVER":26.9,"CQADupstackRetrieval":36.62,"DBPedia":39.55,"FEVER":72.66,"FiQA2018":42.79,"HotpotQA":57.85,"MSMARCO":42.73,"NFCorpus":32.63,"NQ":55.09,"QuoraRetrieval":88.47,"SCIDOCS":15.51,"SciFact":63.42,"Touche2020":28.29,"TRECCOVID":56.68} -{"index":162,"Rank":106,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":46.64,"ArguAna":39.06,"ClimateFEVER":22.55,"CQADupstackRetrieval":36.07,"DBPedia":37.76,"FEVER":75.27,"FiQA2018":33.31,"HotpotQA":65.09,"MSMARCO":40.99,"NFCorpus":31.01,"NQ":56.29,"QuoraRetrieval":86.93,"SCIDOCS":13.89,"SciFact":67.7,"Touche2020":21.16,"TRECCOVID":72.57} -{"index":82,"Rank":107,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.54,"ArguAna":50.49,"ClimateFEVER":27.11,"CQADupstackRetrieval":36.53,"DBPedia":34.7,"FEVER":72.73,"FiQA2018":33.29,"HotpotQA":52.84,"MSMARCO":38.83,"NFCorpus":33.89,"NQ":46.7,"QuoraRetrieval":85.6,"SCIDOCS":16.57,"SciFact":70.17,"Touche2020":23.44,"TRECCOVID":75.17} -{"index":157,"Rank":108,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":46.01,"ArguAna":46.69,"ClimateFEVER":15.81,"CQADupstackRetrieval":36.08,"DBPedia":38.64,"FEVER":53.52,"FiQA2018":34.8,"HotpotQA":56.34,"MSMARCO":42.33,"NFCorpus":33.93,"NQ":58.73,"QuoraRetrieval":87.71,"SCIDOCS":16.42,"SciFact":65.6,"Touche2020":26.81,"TRECCOVID":76.78} -{"index":166,"Rank":109,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.27,"ArguAna":46.67,"ClimateFEVER":25.56,"CQADupstackRetrieval":34.3,"DBPedia":32.61,"FEVER":67.22,"FiQA2018":31.29,"HotpotQA":51.67,"MSMARCO":38.27,"NFCorpus":30.7,"NQ":46.16,"QuoraRetrieval":87.01,"SCIDOCS":17.23,"SciFact":65.42,"Touche2020":24.92,"TRECCOVID":80.03} -{"index":177,"Rank":110,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.14,"ArguAna":46.73,"ClimateFEVER":24.05,"CQADupstackRetrieval":38.03,"DBPedia":32.65,"FEVER":68.02,"FiQA2018":33.43,"HotpotQA":56.48,"MSMARCO":37.28,"NFCorpus":30.4,"NQ":51.59,"QuoraRetrieval":87.19,"SCIDOCS":18.61,"SciFact":63.89,"Touche2020":23.52,"TRECCOVID":65.18} -{"index":116,"Rank":111,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":45.12,"ArguAna":51.25,"ClimateFEVER":25.16,"CQADupstackRetrieval":41.77,"DBPedia":34.83,"FEVER":73.17,"FiQA2018":37.85,"HotpotQA":52.59,"MSMARCO":36.54,"NFCorpus":31.34,"NQ":46.1,"QuoraRetrieval":88.07,"SCIDOCS":21.44,"SciFact":64.4,"Touche2020":20.79,"TRECCOVID":51.47} -{"index":147,"Rank":112,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":45.12,"ArguAna":52.03,"ClimateFEVER":27.95,"CQADupstackRetrieval":40.95,"DBPedia":33.34,"FEVER":71.85,"FiQA2018":39.18,"HotpotQA":54.19,"MSMARCO":37.76,"NFCorpus":31.59,"NQ":45.88,"QuoraRetrieval":88.19,"SCIDOCS":17.09,"SciFact":57.83,"Touche2020":20.37,"TRECCOVID":58.55} -{"index":185,"Rank":113,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.99,"ArguAna":39.21,"ClimateFEVER":25.02,"CQADupstackRetrieval":38.91,"DBPedia":38.79,"FEVER":78.0,"FiQA2018":45.02,"HotpotQA":57.14,"MSMARCO":36.51,"NFCorpus":31.57,"NQ":52.83,"QuoraRetrieval":87.79,"SCIDOCS":15.62,"SciFact":69.32,"Touche2020":13.87,"TRECCOVID":45.22} -{"index":107,"Rank":114,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.92,"ArguAna":53.64,"ClimateFEVER":24.71,"CQADupstackRetrieval":37.8,"DBPedia":35.97,"FEVER":70.11,"FiQA2018":31.42,"HotpotQA":55.7,"MSMARCO":34.51,"NFCorpus":32.04,"NQ":43.03,"QuoraRetrieval":87.04,"SCIDOCS":19.07,"SciFact":67.51,"Touche2020":22.08,"TRECCOVID":59.21} -{"index":172,"Rank":115,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.81,"ArguAna":46.48,"ClimateFEVER":21.21,"CQADupstackRetrieval":37.96,"DBPedia":34.13,"FEVER":71.9,"FiQA2018":37.27,"HotpotQA":54.95,"MSMARCO":40.34,"NFCorpus":32.24,"NQ":51.4,"QuoraRetrieval":88.09,"SCIDOCS":18.45,"SciFact":59.76,"Touche2020":20.73,"TRECCOVID":57.25} -{"index":235,"Rank":116,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":44.67,"ArguAna":50.83,"ClimateFEVER":24.88,"CQADupstackRetrieval":34.55,"DBPedia":35.24,"FEVER":68.93,"FiQA2018":35.15,"HotpotQA":54.93,"MSMARCO":41.16,"NFCorpus":30.22,"NQ":50.47,"QuoraRetrieval":87.98,"SCIDOCS":14.0,"SciFact":59.74,"Touche2020":25.89,"TRECCOVID":56.05} -{"index":79,"Rank":117,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.49,"ArguAna":49.68,"ClimateFEVER":26.6,"CQADupstackRetrieval":33.33,"DBPedia":31.51,"FEVER":68.12,"FiQA2018":29.99,"HotpotQA":49.93,"MSMARCO":36.05,"NFCorpus":32.08,"NQ":42.94,"QuoraRetrieval":85.28,"SCIDOCS":16.18,"SciFact":68.29,"Touche2020":24.45,"TRECCOVID":72.98} -{"index":171,"Rank":118,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.03,"ArguAna":49.01,"ClimateFEVER":21.48,"CQADupstackRetrieval":37.48,"DBPedia":32.44,"FEVER":73.29,"FiQA2018":34.06,"HotpotQA":52.78,"MSMARCO":37.77,"NFCorpus":30.38,"NQ":47.88,"QuoraRetrieval":87.63,"SCIDOCS":17.63,"SciFact":59.39,"Touche2020":18.59,"TRECCOVID":60.57} -{"index":69,"Rank":119,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.97,"ArguAna":55.78,"ClimateFEVER":21.23,"CQADupstackRetrieval":38.2,"DBPedia":33.88,"FEVER":63.97,"FiQA2018":30.71,"HotpotQA":54.21,"MSMARCO":33.61,"NFCorpus":32.04,"NQ":42.47,"QuoraRetrieval":86.03,"SCIDOCS":18.64,"SciFact":67.31,"Touche2020":21.12,"TRECCOVID":60.32} 
-{"index":230,"Rank":120,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.81,"ArguAna":46.52,"ClimateFEVER":21.97,"CQADupstackRetrieval":44.96,"DBPedia":32.09,"FEVER":50.86,"FiQA2018":49.96,"HotpotQA":39.29,"MSMARCO":39.75,"NFCorpus":33.29,"NQ":50.45,"QuoraRetrieval":87.46,"SCIDOCS":23.76,"SciFact":65.57,"Touche2020":19.93,"TRECCOVID":51.33} -{"index":184,"Rank":121,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.39,"ArguAna":40.77,"ClimateFEVER":21.84,"CQADupstackRetrieval":33.15,"DBPedia":37.47,"FEVER":61.77,"FiQA2018":39.21,"HotpotQA":55.84,"MSMARCO":35.36,"NFCorpus":31.57,"NQ":47.73,"QuoraRetrieval":87.96,"SCIDOCS":19.48,"SciFact":68.35,"Touche2020":15.22,"TRECCOVID":55.07} -{"index":228,"Rank":122,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":42.69,"ArguAna":47.13,"ClimateFEVER":21.57,"CQADupstackRetrieval":42.53,"DBPedia":33.35,"FEVER":55.9,"FiQA2018":37.27,"HotpotQA":44.59,"MSMARCO":39.03,"NFCorpus":32.25,"NQ":46.47,"QuoraRetrieval":87.75,"SCIDOCS":21.82,"SciFact":62.64,"Touche2020":17.22,"TRECCOVID":50.82} -{"index":106,"Rank":123,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.56,"ArguAna":55.31,"ClimateFEVER":25.35,"CQADupstackRetrieval":35.07,"DBPedia":32.25,"FEVER":74.99,"FiQA2018":25.59,"HotpotQA":53.91,"MSMARCO":31.01,"NFCorpus":31.86,"NQ":34.94,"QuoraRetrieval":85.72,"SCIDOCS":17.69,"SciFact":66.27,"Touche2020":18.1,"TRECCOVID":50.38} -{"index":246,"Rank":124,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":42.24,"ArguAna":39.85,"ClimateFEVER":14.63,"CQADupstackRetrieval":44.65,"DBPedia":39.19,"FEVER":51.2,"FiQA2018":46.68,"HotpotQA":42.14,"MSMARCO":27.67,"NFCorpus":35.08,"NQ":52.87,"QuoraRetrieval":85.96,"SCIDOCS":17.17,"SciFact":55.38,"Touche2020":21.65,"TRECCOVID":59.48} -{"index":229,"Rank":125,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":41.95,"ArguAna":50.17,"ClimateFEVER":20.27,"CQADupstackRetrieval":41.32,"DBPedia":32.33,"FEVER":51.93,"FiQA2018":36.87,"HotpotQA":46.51,"MSMARCO":36.54,"NFCorpus":31.59,"NQ":43.87,"QuoraRetrieval":87.56,"SCIDOCS":21.64,"SciFact":64.51,"Touche2020":16.9,"TRECCOVID":47.25} -{"index":214,"Rank":126,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.88,"ArguAna":48.32,"ClimateFEVER":24.79,"CQADupstackRetrieval":33.67,"DBPedia":38.1,"FEVER":59.29,"FiQA2018":27.42,"HotpotQA":56.81,"MSMARCO":36.77,"NFCorpus":31.32,"NQ":41.83,"QuoraRetrieval":86.72,"SCIDOCS":17.12,"SciFact":65.51,"Touche2020":15.79,"TRECCOVID":44.77} -{"index":183,"Rank":127,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.41,"ArguAna":32.93,"ClimateFEVER":25.94,"CQADupstackRetrieval":33.12,"DBPedia":35.47,"FEVER":56.41,"FiQA2018":37.75,"HotpotQA":54.66,"MSMARCO":38.29,"NFCorpus":30.16,"NQ":47.61,"QuoraRetrieval":87.85,"SCIDOCS":10.59,"SciFact":66.23,"Touche2020":13.31,"TRECCOVID":50.81} -{"index":168,"Rank":128,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":41.19,"ArguAna":44.55,"ClimateFEVER":21.02,"CQADupstackRetrieval":31.43,"DBPedia":28.22,"FEVER":61.1,"FiQA2018":24.49,"HotpotQA":46.05,"MSMARCO":34.18,"NFCorpus":27.34,"NQ":40.05,"QuoraRetrieval":86.04,"SCIDOCS":15.31,"SciFact":61.74,"Touche2020":23.35,"TRECCOVID":72.99} -{"index":134,"Rank":129,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":41.17,"ArguAna":49.11,"ClimateFEVER":23.29,"CQADupstackRetrieval":39.04,"DBPedia":32.04,"FEVER":52.63,"FiQA2018":36.35,"HotpotQA":45.66,"MSMARCO":36.83,"NFCorpus":29.67,"NQ":44.48,"QuoraRetrieval":87.21,"SCIDOCS":15.78,"SciFact":54.03,"Touche2020":15.61,"TRECCOVID":55.85} -{"index":212,"Rank":130,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":40.92,"ArguAna":37.16,"ClimateFEVER":31.48,"CQADupstackRetrieval":28.72,"DBPedia":28.19,"FEVER":70.24,"FiQA2018":25.78,"HotpotQA":43.07,"MSMARCO":35.95,"NFCorpus":26.03,"NQ":45.54,"QuoraRetrieval":85.83,"SCIDOCS":12.09,"SciFact":52.71,"Touche2020":23.13,"TRECCOVID":67.83} -{"index":105,"Rank":131,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.82,"ArguAna":51.51,"ClimateFEVER":24.77,"CQADupstackRetrieval":33.64,"DBPedia":31.06,"FEVER":69.64,"FiQA2018":24.37,"HotpotQA":49.73,"MSMARCO":27.85,"NFCorpus":30.04,"NQ":30.73,"QuoraRetrieval":85.29,"SCIDOCS":17.26,"SciFact":65.41,"Touche2020":18.34,"TRECCOVID":52.62} -{"index":12,"Rank":132,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.84,"ArguAna":49.28,"ClimateFEVER":13.62,"CQADupstackRetrieval":31.86,"DBPedia":29.91,"FEVER":48.09,"FiQA2018":25.14,"HotpotQA":56.91,"MSMARCO":21.89,"NFCorpus":32.08,"NQ":28.5,"QuoraRetrieval":80.42,"SCIDOCS":15.78,"SciFact":68.7,"Touche2020":33.05,"TRECCOVID":62.31} -{"index":63,"Rank":133,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":39.19,"ArguAna":51.73,"ClimateFEVER":23.58,"CQADupstackRetrieval":32.4,"DBPedia":26.78,"FEVER":53.42,"FiQA2018":28.56,"HotpotQA":52.37,"MSMARCO":17.47,"NFCorpus":26.28,"NQ":37.65,"QuoraRetrieval":84.64,"SCIDOCS":10.39,"SciFact":66.36,"Touche2020":12.82,"TRECCOVID":63.34} -{"index":173,"Rank":134,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.91,"ArguAna":43.57,"ClimateFEVER":17.25,"CQADupstackRetrieval":33.74,"DBPedia":28.28,"FEVER":69.12,"FiQA2018":25.19,"HotpotQA":47.48,"MSMARCO":31.8,"NFCorpus":25.96,"NQ":38.89,"QuoraRetrieval":85.69,"SCIDOCS":15.29,"SciFact":52.4,"Touche2020":16.67,"TRECCOVID":52.3} -{"index":132,"Rank":135,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.79,"ArguAna":52.03,"ClimateFEVER":21.82,"CQADupstackRetrieval":34.46,"DBPedia":26.51,"FEVER":56.32,"FiQA2018":30.59,"HotpotQA":42.09,"MSMARCO":27.68,"NFCorpus":26.43,"NQ":37.93,"QuoraRetrieval":85.56,"SCIDOCS":16.6,"SciFact":59.95,"Touche2020":18.85,"TRECCOVID":45.1} -{"index":245,"Rank":136,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Average":38.47,"ArguAna":39.4,"ClimateFEVER":10.61,"CQADupstackRetrieval":40.78,"DBPedia":33.65,"FEVER":36.12,"FiQA2018":44.71,"HotpotQA":37.17,"MSMARCO":25.17,"NFCorpus":33.18,"NQ":46.29,"QuoraRetrieval":85.85,"SCIDOCS":15.97,"SciFact":50.91,"Touche2020":22.51,"TRECCOVID":54.77} -{"index":65,"Rank":137,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":38.05,"ArguAna":51.0,"ClimateFEVER":22.97,"CQADupstackRetrieval":33.37,"DBPedia":25.48,"FEVER":45.11,"FiQA2018":27.24,"HotpotQA":54.54,"MSMARCO":19.13,"NFCorpus":27.16,"NQ":34.16,"QuoraRetrieval":84.4,"SCIDOCS":15.35,"SciFact":68.68,"Touche2020":6.54,"TRECCOVID":55.67} -{"index":68,"Rank":138,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.94,"ArguAna":44.8,"ClimateFEVER":17.69,"CQADupstackRetrieval":38.2,"DBPedia":32.97,"FEVER":45.91,"FiQA2018":21.29,"HotpotQA":51.52,"MSMARCO":29.49,"NFCorpus":26.78,"NQ":32.6,"QuoraRetrieval":85.3,"SCIDOCS":15.31,"SciFact":63.23,"Touche2020":16.3,"TRECCOVID":47.77} -{"index":80,"Rank":139,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.04,"ArguAna":45.42,"ClimateFEVER":21.86,"CQADupstackRetrieval":27.25,"DBPedia":22.72,"FEVER":60.45,"FiQA2018":21.12,"HotpotQA":40.88,"MSMARCO":27.98,"NFCorpus":22.79,"NQ":29.73,"QuoraRetrieval":72.98,"SCIDOCS":12.21,"SciFact":56.9,"Touche2020":22.97,"TRECCOVID":70.3} -{"index":44,"Rank":140,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.99,"ArguAna":49.82,"ClimateFEVER":23.22,"CQADupstackRetrieval":31.75,"DBPedia":27.77,"FEVER":59.39,"FiQA2018":25.33,"HotpotQA":41.66,"MSMARCO":24.56,"NFCorpus":24.76,"NQ":32.94,"QuoraRetrieval":84.78,"SCIDOCS":15.26,"SciFact":56.03,"Touche2020":19.0,"TRECCOVID":38.58} -{"index":61,"Rank":141,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":36.75,"ArguAna":47.09,"ClimateFEVER":20.67,"CQADupstackRetrieval":30.78,"DBPedia":25.81,"FEVER":43.48,"FiQA2018":24.62,"HotpotQA":48.46,"MSMARCO":18.81,"NFCorpus":26.81,"NQ":33.21,"QuoraRetrieval":86.15,"SCIDOCS":10.0,"SciFact":64.48,"Touche2020":10.18,"TRECCOVID":60.67} -{"index":244,"Rank":142,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":36.71,"ArguAna":39.27,"ClimateFEVER":11.36,"CQADupstackRetrieval":38.96,"DBPedia":31.55,"FEVER":36.21,"FiQA2018":43.55,"HotpotQA":33.95,"MSMARCO":23.96,"NFCorpus":31.1,"NQ":42.02,"QuoraRetrieval":85.73,"SCIDOCS":15.38,"SciFact":49.91,"Touche2020":21.63,"TRECCOVID":46.11} -{"index":113,"Rank":143,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.08,"ArguAna":52.45,"ClimateFEVER":19.0,"CQADupstackRetrieval":30.71,"DBPedia":25.27,"FEVER":50.13,"FiQA2018":22.14,"HotpotQA":41.33,"MSMARCO":22.15,"NFCorpus":29.05,"NQ":23.45,"QuoraRetrieval":83.63,"SCIDOCS":14.95,"SciFact":61.96,"Touche2020":17.47,"TRECCOVID":47.52} -{"index":242,"Rank":144,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":35.34,"ArguAna":48.91,"ClimateFEVER":15.27,"CQADupstackRetrieval":31.32,"DBPedia":26.22,"FEVER":56.76,"FiQA2018":22.96,"HotpotQA":37.03,"MSMARCO":26.6,"NFCorpus":25.49,"NQ":33.6,"QuoraRetrieval":86.4,"SCIDOCS":13.97,"SciFact":50.3,"Touche2020":17.4,"TRECCOVID":37.87} -{"index":243,"Rank":145,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":33.63,"ArguAna":44.85,"ClimateFEVER":10.37,"CQADupstackRetrieval":35.23,"DBPedia":27.77,"FEVER":26.17,"FiQA2018":34.83,"HotpotQA":33.2,"MSMARCO":20.7,"NFCorpus":28.65,"NQ":36.32,"QuoraRetrieval":85.49,"SCIDOCS":14.15,"SciFact":45.76,"Touche2020":20.3,"TRECCOVID":40.7} -{"index":239,"Rank":146,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.96,"ArguAna":45.15,"ClimateFEVER":16.96,"CQADupstackRetrieval":27.72,"DBPedia":27.86,"FEVER":45.68,"FiQA2018":15.62,"HotpotQA":35.61,"MSMARCO":29.57,"NFCorpus":22.29,"NQ":29.85,"QuoraRetrieval":86.51,"SCIDOCS":10.13,"SciFact":52.31,"Touche2020":8.57,"TRECCOVID":40.54} -{"index":241,"Rank":147,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":32.45,"ArguAna":44.88,"ClimateFEVER":18.49,"CQADupstackRetrieval":30.7,"DBPedia":22.63,"FEVER":52.66,"FiQA2018":20.33,"HotpotQA":30.01,"MSMARCO":23.72,"NFCorpus":23.45,"NQ":29.8,"QuoraRetrieval":86.55,"SCIDOCS":0.03,"SciFact":48.37,"Touche2020":16.06,"TRECCOVID":39.12} -{"index":84,"Rank":148,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.34,"ArguAna":35.07,"ClimateFEVER":17.57,"CQADupstackRetrieval":29.98,"DBPedia":26.1,"FEVER":38.64,"FiQA2018":18.59,"HotpotQA":33.99,"MSMARCO":15.83,"NFCorpus":28.26,"NQ":24.63,"QuoraRetrieval":84.68,"SCIDOCS":13.55,"SciFact":46.66,"Touche2020":16.18,"TRECCOVID":55.35} -{"index":72,"Rank":149,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.33,"ArguAna":35.87,"ClimateFEVER":13.11,"CQADupstackRetrieval":21.69,"DBPedia":29.33,"FEVER":35.58,"FiQA2018":8.1,"HotpotQA":47.03,"MSMARCO":19.8,"NFCorpus":18.75,"NQ":23.25,"QuoraRetrieval":82.3,"SCIDOCS":11.12,"SciFact":50.24,"Touche2020":8.0,"TRECCOVID":35.74} -{"index":70,"Rank":150,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":26.17,"ArguAna":31.14,"ClimateFEVER":10.02,"CQADupstackRetrieval":18.63,"DBPedia":25.66,"FEVER":29.18,"FiQA2018":6.42,"HotpotQA":39.88,"MSMARCO":16.53,"NFCorpus":15.64,"NQ":19.99,"QuoraRetrieval":79.94,"SCIDOCS":9.93,"SciFact":42.42,"Touche2020":7.75,"TRECCOVID":39.48} -{"index":67,"Rank":151,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":25.93,"ArguAna":43.64,"ClimateFEVER":18.95,"CQADupstackRetrieval":18.5,"DBPedia":13.21,"FEVER":16.96,"FiQA2018":16.99,"HotpotQA":22.64,"MSMARCO":7.03,"NFCorpus":15.73,"NQ":17.96,"QuoraRetrieval":78.23,"SCIDOCS":5.53,"SciFact":38.31,"Touche2020":19.17,"TRECCOVID":56.04} -{"index":71,"Rank":152,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":25.51,"ArguAna":34.8,"ClimateFEVER":8.35,"CQADupstackRetrieval":19.16,"DBPedia":21.32,"FEVER":26.08,"FiQA2018":11.71,"HotpotQA":38.38,"MSMARCO":16.96,"NFCorpus":14.02,"NQ":20.74,"QuoraRetrieval":74.94,"SCIDOCS":9.39,"SciFact":46.0,"Touche2020":10.51,"TRECCOVID":30.27} -{"index":217,"Rank":153,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":21.82,"ArguAna":38.33,"ClimateFEVER":11.98,"CQADupstackRetrieval":14.5,"DBPedia":19.73,"FEVER":20.41,"FiQA2018":10.41,"HotpotQA":22.9,"MSMARCO":11.0,"NFCorpus":12.42,"NQ":16.08,"QuoraRetrieval":79.62,"SCIDOCS":7.53,"SciFact":29.59,"Touche2020":9.9,"TRECCOVID":22.93} -{"index":232,"Rank":154,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":21.62,"ArguAna":36.3,"ClimateFEVER":14.44,"CQADupstackRetrieval":15.47,"DBPedia":18.28,"FEVER":14.99,"FiQA2018":10.09,"HotpotQA":19.18,"MSMARCO":9.6,"NFCorpus":13.87,"NQ":12.87,"QuoraRetrieval":71.32,"SCIDOCS":8.04,"SciFact":29.58,"Touche2020":13.99,"TRECCOVID":36.22} -{"index":233,"Rank":155,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":21.22,"ArguAna":30.96,"ClimateFEVER":14.87,"CQADupstackRetrieval":16.79,"DBPedia":15.88,"FEVER":15.56,"FiQA2018":10.49,"HotpotQA":20.77,"MSMARCO":9.75,"NFCorpus":11.79,"NQ":12.75,"QuoraRetrieval":71.57,"SCIDOCS":8.47,"SciFact":29.53,"Touche2020":13.17,"TRECCOVID":35.92} -{"index":81,"Rank":156,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":20.9,"ArguAna":31.04,"ClimateFEVER":11.01,"CQADupstackRetrieval":20.29,"DBPedia":10.87,"FEVER":18.4,"FiQA2018":8.94,"HotpotQA":17.73,"MSMARCO":6.27,"NFCorpus":11.8,"NQ":7.63,"QuoraRetrieval":78.96,"SCIDOCS":7.13,"SciFact":31.79,"Touche2020":12.27,"TRECCOVID":39.31} -{"index":218,"Rank":157,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":20.29,"ArguAna":38.34,"ClimateFEVER":11.8,"CQADupstackRetrieval":13.22,"DBPedia":15.04,"FEVER":21.06,"FiQA2018":9.84,"HotpotQA":19.75,"MSMARCO":9.35,"NFCorpus":9.88,"NQ":11.69,"QuoraRetrieval":78.03,"SCIDOCS":5.5,"SciFact":25.72,"Touche2020":8.9,"TRECCOVID":26.2} -{"index":227,"Rank":158,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":18.99,"ArguAna":34.18,"ClimateFEVER":3.83,"CQADupstackRetrieval":18.75,"DBPedia":15.57,"FEVER":12.18,"FiQA2018":7.0,"HotpotQA":18.75,"MSMARCO":7.6,"NFCorpus":16.54,"NQ":8.42,"QuoraRetrieval":77.03,"SCIDOCS":5.63,"SciFact":38.2,"Touche2020":4.88,"TRECCOVID":16.34} -{"index":277,"Rank":159,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.36,"ArguAna":39.65,"ClimateFEVER":2.83,"CQADupstackRetrieval":10.17,"DBPedia":3.48,"FEVER":4.45,"FiQA2018":7.54,"HotpotQA":12.6,"MSMARCO":10.53,"NFCorpus":20.59,"NQ":2.02,"QuoraRetrieval":82.18,"SCIDOCS":6.28,"SciFact":45.46,"Touche2020":3.1,"TRECCOVID":24.56} -{"index":231,"Rank":160,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":15.88,"ArguAna":32.67,"ClimateFEVER":6.86,"CQADupstackRetrieval":14.6,"DBPedia":4.14,"FEVER":5.45,"FiQA2018":5.64,"HotpotQA":5.46,"MSMARCO":5.59,"NFCorpus":0.85,"NQ":5.99,"QuoraRetrieval":64.65,"SCIDOCS":0.0,"SciFact":47.88,"Touche2020":8.46,"TRECCOVID":29.91} 
-{"index":141,"Rank":161,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":13.47,"ArguAna":18.3,"ClimateFEVER":1.79,"CQADupstackRetrieval":8.87,"DBPedia":3.92,"FEVER":1.59,"FiQA2018":3.0,"HotpotQA":12.96,"MSMARCO":3.0,"NFCorpus":5.59,"NQ":0.89,"QuoraRetrieval":78.62,"SCIDOCS":1.79,"SciFact":35.29,"Touche2020":1.68,"TRECCOVID":24.82} -{"index":122,"Rank":162,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":10.59,"ArguAna":28.29,"ClimateFEVER":5.41,"CQADupstackRetrieval":5.51,"DBPedia":4.13,"FEVER":3.3,"FiQA2018":2.19,"HotpotQA":8.26,"MSMARCO":1.91,"NFCorpus":4.3,"NQ":2.62,"QuoraRetrieval":61.03,"SCIDOCS":2.82,"SciFact":13.34,"Touche2020":0.97,"TRECCOVID":14.74} -{"index":11,"Rank":163,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":7.94,"ArguAna":12.86,"ClimateFEVER":0.36,"CQADupstackRetrieval":4.12,"DBPedia":1.53,"FEVER":0.77,"FiQA2018":1.73,"HotpotQA":5.5,"MSMARCO":1.09,"NFCorpus":2.44,"NQ":0.64,"QuoraRetrieval":71.14,"SCIDOCS":0.78,"SciFact":4.04,"Touche2020":1.06,"TRECCOVID":10.97} -{"index":2,"Rank":164,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":48.83,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":40.38,"HotpotQA":"","MSMARCO":35.19,"NFCorpus":"","NQ":51.08,"QuoraRetrieval":"","SCIDOCS":"","SciFact":73.5,"Touche2020":"","TRECCOVID":54.74} -{"index":77,"Rank":196,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":44.35,"ClimateFEVER":17.77,"CQADupstackRetrieval":25.56,"DBPedia":21.94,"FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":78,"Rank":197,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":35.0,"ClimateFEVER":"","CQADupstackRetrieval":22.96,"DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":93,"Rank":206,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.42,"ClimateFEVER":22.21,"CQADupstackRetrieval":"","DBPedia":44.02,"FEVER":82.83,"FiQA2018":41.14,"HotpotQA":73.13,"MSMARCO":43.46,"NFCorpus":37.13,"NQ":63.44,"QuoraRetrieval":86.84,"SCIDOCS":20.51,"SciFact":72.24,"Touche2020":20.67,"TRECCOVID":66.54} -{"index":111,"Rank":213,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":66.15,"ClimateFEVER":38.53,"CQADupstackRetrieval":"","DBPedia":44.89,"FEVER":88.24,"FiQA2018":44.84,"HotpotQA":73.13,"MSMARCO":41.4,"NFCorpus":38.65,"NQ":55.86,"QuoraRetrieval":89.02,"SCIDOCS":22.98,"SciFact":74.07,"Touche2020":24.93,"TRECCOVID":76.33} -{"index":114,"Rank":214,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":63.54,"ClimateFEVER":36.57,"CQADupstackRetrieval":"","DBPedia":44.11,"FEVER":87.18,"FiQA2018":45.02,"HotpotQA":74.1,"MSMARCO":42.49,"NFCorpus":38.13,"NQ":55.03,"QuoraRetrieval":89.07,"SCIDOCS":22.64,"SciFact":74.61,"Touche2020":24.81,"TRECCOVID":74.82} -{"index":120,"Rank":215,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":"","DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"index":124,"Rank":216,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","ArguAna":53.96,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":41.3,"HotpotQA":"","MSMARCO":"","NFCorpus":31.41,"NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":64.37,"Touche2020":"","TRECCOVID":""} -{"index":127,"Rank":217,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":37.27,"ClimateFEVER":8.69,"CQADupstackRetrieval":18.81,"DBPedia":14.77,"FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":128,"Rank":218,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":47.29,"ClimateFEVER":13.48,"CQADupstackRetrieval":31.03,"DBPedia":22.45,"FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":133,"Rank":221,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":66.02,"ClimateFEVER":36.09,"CQADupstackRetrieval":"","DBPedia":44.51,"FEVER":86.91,"FiQA2018":45.27,"HotpotQA":72.03,"MSMARCO":41.26,"NFCorpus":38.64,"NQ":55.79,"QuoraRetrieval":88.98,"SCIDOCS":23.32,"SciFact":74.73,"Touche2020":25.2,"TRECCOVID":75.57} -{"index":135,"Rank":222,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":44.18,"ClimateFEVER":23.53,"CQADupstackRetrieval":"","DBPedia":35.05,"FEVER":72.33,"FiQA2018":41.58,"HotpotQA":61.38,"MSMARCO":40.92,"NFCorpus":32.45,"NQ":60.44,"QuoraRetrieval":88.2,"SCIDOCS":19.86,"SciFact":66.68,"Touche2020":26.24,"TRECCOVID":65.91} -{"index":136,"Rank":223,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.73,"ClimateFEVER":24.05,"CQADupstackRetrieval":"","DBPedia":32.65,"FEVER":68.02,"FiQA2018":33.43,"HotpotQA":56.48,"MSMARCO":37.28,"NFCorpus":30.4,"NQ":51.59,"QuoraRetrieval":87.19,"SCIDOCS":18.61,"SciFact":63.89,"Touche2020":23.52,"TRECCOVID":65.18} -{"index":137,"Rank":224,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":60.63,"ClimateFEVER":29.0,"CQADupstackRetrieval":"","DBPedia":39.64,"FEVER":79.13,"FiQA2018":38.62,"HotpotQA":68.22,"MSMARCO":40.95,"NFCorpus":37.51,"NQ":50.2,"QuoraRetrieval":88.72,"SCIDOCS":18.58,"SciFact":72.51,"Touche2020":21.9,"TRECCOVID":64.79} -{"index":150,"Rank":230,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":63.54,"ClimateFEVER":36.57,"CQADupstackRetrieval":"","DBPedia":44.11,"FEVER":87.18,"FiQA2018":45.02,"HotpotQA":74.1,"MSMARCO":42.49,"NFCorpus":38.13,"NQ":55.03,"QuoraRetrieval":89.07,"SCIDOCS":22.64,"SciFact":74.61,"Touche2020":24.81,"TRECCOVID":74.82} 
-{"index":165,"Rank":233,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":66.15,"ClimateFEVER":38.53,"CQADupstackRetrieval":"","DBPedia":44.89,"FEVER":88.24,"FiQA2018":44.84,"HotpotQA":73.13,"MSMARCO":41.4,"NFCorpus":38.65,"NQ":55.86,"QuoraRetrieval":89.02,"SCIDOCS":22.98,"SciFact":74.07,"Touche2020":24.93,"TRECCOVID":76.33} -{"index":174,"Rank":234,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.33,"ClimateFEVER":21.8,"CQADupstackRetrieval":36.22,"DBPedia":31.47,"FEVER":67.89,"FiQA2018":32.3,"HotpotQA":55.39,"MSMARCO":"","NFCorpus":28.61,"NQ":48.9,"QuoraRetrieval":87.93,"SCIDOCS":16.29,"SciFact":60.68,"Touche2020":21.03,"TRECCOVID":65.12} -{"index":176,"Rank":235,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":50.13,"ClimateFEVER":27.13,"CQADupstackRetrieval":38.78,"DBPedia":32.63,"FEVER":78.43,"FiQA2018":37.01,"HotpotQA":59.48,"MSMARCO":"","NFCorpus":30.3,"NQ":50.7,"QuoraRetrieval":88.14,"SCIDOCS":17.36,"SciFact":62.67,"Touche2020":19.82,"TRECCOVID":67.37} -{"index":196,"Rank":243,"Model":"fin-mpnet-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.11,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":79.91,"HotpotQA":"","MSMARCO":"","NFCorpus":29.64,"NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":65.4,"Touche2020":"","TRECCOVID":""} -{"index":202,"Rank":248,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":57.77,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":203,"Rank":249,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.13,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":248,"Rank":260,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":"","ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":83.07,"SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":251,"Rank":262,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":38.88,"ClimateFEVER":9.94,"CQADupstackRetrieval":18.0,"DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"index":255,"Rank":263,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":5.35,"ClimateFEVER":0.59,"CQADupstackRetrieval":"","DBPedia":0.43,"FEVER":0.51,"FiQA2018":0.76,"HotpotQA":0.59,"MSMARCO":0.31,"NFCorpus":4.87,"NQ":0.33,"QuoraRetrieval":31.95,"SCIDOCS":1.02,"SciFact":2.91,"Touche2020":3.82,"TRECCOVID":7.5} -{"index":256,"Rank":264,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","ArguAna":36.96,"ClimateFEVER":20.95,"CQADupstackRetrieval":"","DBPedia":24.25,"FEVER":29.03,"FiQA2018":13.57,"HotpotQA":33.73,"MSMARCO":9.51,"NFCorpus":21.89,"NQ":20.45,"QuoraRetrieval":67.91,"SCIDOCS":11.37,"SciFact":38.8,"Touche2020":18.78,"TRECCOVID":49.87} -{"index":262,"Rank":269,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":54.83,"ClimateFEVER":31.95,"CQADupstackRetrieval":32.2,"DBPedia":41.81,"FEVER":62.94,"FiQA2018":29.36,"HotpotQA":63.85,"MSMARCO":"","NFCorpus":28.47,"NQ":42.04,"QuoraRetrieval":88.15,"SCIDOCS":17.3,"SciFact":65.72,"Touche2020":18.1,"TRECCOVID":53.89} -{"index":263,"Rank":270,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":0.44,"ClimateFEVER":0.11,"CQADupstackRetrieval":"","DBPedia":0.28,"FEVER":0.25,"FiQA2018":0.16,"HotpotQA":0.3,"MSMARCO":0.2,"NFCorpus":0.23,"NQ":0.21,"QuoraRetrieval":0.84,"SCIDOCS":0.15,"SciFact":0.51,"Touche2020":0.12,"TRECCOVID":0.37} -{"index":267,"Rank":274,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.87,"ClimateFEVER":19.68,"CQADupstackRetrieval":"","DBPedia":36.06,"FEVER":69.98,"FiQA2018":35.49,"HotpotQA":65.0,"MSMARCO":68.72,"NFCorpus":31.81,"NQ":52.15,"QuoraRetrieval":85.02,"SCIDOCS":17.36,"SciFact":67.97,"Touche2020":13.23,"TRECCOVID":52.61} -{"index":272,"Rank":279,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.91,"ClimateFEVER":18.5,"CQADupstackRetrieval":"","DBPedia":36.2,"FEVER":72.1,"FiQA2018":38.41,"HotpotQA":59.39,"MSMARCO":37.94,"NFCorpus":33.17,"NQ":42.81,"QuoraRetrieval":70.57,"SCIDOCS":14.83,"SciFact":67.25,"Touche2020":28.68,"TRECCOVID":72.43} -{"index":274,"Rank":281,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.2,"ClimateFEVER":19.9,"CQADupstackRetrieval":"","DBPedia":"","FEVER":77.0,"FiQA2018":42.2,"HotpotQA":63.1,"MSMARCO":"","NFCorpus":36.7,"NQ":"","QuoraRetrieval":69.7,"SCIDOCS":"","SciFact":70.4,"Touche2020":29.7,"TRECCOVID":58.5} -{"index":275,"Rank":282,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.98,"ClimateFEVER":19.4,"CQADupstackRetrieval":"","DBPedia":"","FEVER":75.6,"FiQA2018":45.21,"HotpotQA":64.8,"MSMARCO":"","NFCorpus":38.01,"NQ":"","QuoraRetrieval":67.7,"SCIDOCS":17.74,"SciFact":74.35,"Touche2020":30.9,"TRECCOVID":56.14} -{"index":276,"Rank":283,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":43.5,"ClimateFEVER":22.3,"CQADupstackRetrieval":"","DBPedia":"","FEVER":77.5,"FiQA2018":51.2,"HotpotQA":68.8,"MSMARCO":"","NFCorpus":40.7,"NQ":"","QuoraRetrieval":63.8,"SCIDOCS":"","SciFact":75.4,"Touche2020":29.1,"TRECCOVID":64.9} -{"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":"","ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":5.14,"HotpotQA":"","MSMARCO":"","NFCorpus":19.96,"NQ":"","QuoraRetrieval":83.11,"SCIDOCS":"","SciFact":46.68,"Touche2020":"","TRECCOVID":7.61} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, 
fp32)":2.09,"Average":44.23,"AlloprofRetrieval":38.15,"AlloprofRetrieval (fra-Latn)":39.34,"BSARDRetrieval":0.27,"BSARDRetrieval (fra-Latn)":21.28,"MintakaRetrieval (fr)":25.2,"SyntecRetrieval":81.07,"SyntecRetrieval (fra-Latn)":82.39,"XPQARetrieval (fr)":66.15} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.76,"AlloprofRetrieval":36.21,"AlloprofRetrieval (fra-Latn)":34.45,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":18.83,"MintakaRetrieval (fr)":23.46,"SyntecRetrieval":80.49,"SyntecRetrieval (fra-Latn)":82.86,"XPQARetrieval (fr)":65.81} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.27,"AlloprofRetrieval":27.01,"AlloprofRetrieval (fra-Latn)":27.38,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":14.54,"MintakaRetrieval (fr)":22.53,"SyntecRetrieval":75.76,"SyntecRetrieval (fra-Latn)":73.46,"XPQARetrieval (fr)":57.47} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":37.18,"AlloprofRetrieval":30.8,"AlloprofRetrieval (fra-Latn)":30.8,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":13.19,"MintakaRetrieval (fr)":24.45,"SyntecRetrieval":76.0,"SyntecRetrieval (fra-Latn)":76.0,"XPQARetrieval (fr)":46.22} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":32.25,"AlloprofRetrieval":26.63,"AlloprofRetrieval (fra-Latn)":26.63,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":9.6,"MintakaRetrieval (fr)":21.53,"SyntecRetrieval":65.54,"SyntecRetrieval (fra-Latn)":65.54,"XPQARetrieval (fr)":42.51} +{"Rank":6,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":30.36,"AlloprofRetrieval":28.41,"AlloprofRetrieval (fra-Latn)":28.41,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":4.8,"MintakaRetrieval (fr)":9.19,"SyntecRetrieval":60.15,"SyntecRetrieval (fra-Latn)":60.15,"XPQARetrieval (fr)":51.79} +{"Rank":7,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":27.73,"AlloprofRetrieval":19.77,"AlloprofRetrieval (fra-Latn)":19.77,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":4.44,"MintakaRetrieval (fr)":15.53,"SyntecRetrieval":55.31,"SyntecRetrieval (fra-Latn)":55.31,"XPQARetrieval (fr)":51.74} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":56.84,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":2.48,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.73,"SyntecRetrieval":78.77,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":74.24} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":45.5,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.15,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":15.51,"SyntecRetrieval":75.83,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":67.07} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":52.61,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.29,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":19.05,"SyntecRetrieval":82.77,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":71.95} 
+{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":57.28,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":11.83,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":34.92,"SyntecRetrieval":87.33,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":73.56} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":58.27,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":5.14,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":49.19,"SyntecRetrieval":87.28,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":72.92} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AlloprofRetrieval":3.1,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.36,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":6.31,"SyntecRetrieval":28.58,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":42.59} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":35.39,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":23.0,"SyntecRetrieval":76.88,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":45.23} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":38.36,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.14,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":25.44,"SyntecRetrieval":79.27,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":58.87} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.6,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.39} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.61,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval 
(fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.35} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.6,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.46} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":55.42,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":26.63,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":89.48,"XPQARetrieval (fr)":null} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AlloprofRetrieval":29.97,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.31,"SyntecRetrieval":74.2,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":58.57} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":21.94,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":13.36,"SyntecRetrieval":68.62,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":57.92} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AlloprofRetrieval":31.62,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.87,"SyntecRetrieval":81.11,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":65.62} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.63,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.58,"SyntecRetrieval":20.56,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":6.59} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.72,"AlloprofRetrieval 
(fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.51,"SyntecRetrieval":22.33,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":9.09} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AlloprofRetrieval":0.58,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.26,"SyntecRetrieval":1.58,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":3.69} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":1.63,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.49} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":5.51,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":2.87,"SyntecRetrieval":34.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":26.12} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AlloprofRetrieval":16.46,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.57,"SyntecRetrieval":55.9,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":41.29} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":12.37,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":2.78,"SyntecRetrieval":40.57,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":33.82} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":1.98,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.48,"SyntecRetrieval":24.45,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":12.98} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AlloprofRetrieval":33.2,"AlloprofRetrieval (fra-Latn)":33.2,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":6.24,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":60.8,"XPQARetrieval (fr)":55.9} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":34.27,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":6.98,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":57.39,"XPQARetrieval (fr)":null} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, 
fp32)":0.5,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofRetrieval":26.99,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":22.55,"SyntecRetrieval":65.34,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":51.2} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AlloprofRetrieval":30.23,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":16.31,"SyntecRetrieval":58.07,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":48.83} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":27.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.16,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.04,"SyntecRetrieval":67.0,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":45.19} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":34.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":23.92,"SyntecRetrieval":71.05,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":48.79} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofRetrieval":40.38,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.14,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":31.54,"SyntecRetrieval":74.24,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":52.14} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AlloprofRetrieval":45.75,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":3.33,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":34.93,"SyntecRetrieval":78.97,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":56.2} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model 
Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":18.9,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":14.81,"SyntecRetrieval":49.69,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":40.4} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":35.27,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":26.12,"SyntecRetrieval":69.82,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":59.59} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":33.78,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":26.21,"SyntecRetrieval":63.69,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":65.21} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AlloprofRetrieval":0.16,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.88,"SyntecRetrieval":3.33,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":11.65} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AlloprofRetrieval":0.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.9,"SyntecRetrieval":6.6,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":12.7} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":51.64,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.61,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":29.94,"SyntecRetrieval":85.97,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":73.0} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} diff --git a/all_data_tasks/40/default.jsonl b/all_data_tasks/40/default.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..5f9799c41b32368435c181e147cc8d104986e655 --- /dev/null +++ b/all_data_tasks/40/default.jsonl @@ -0,0 +1,25 @@ +{"Rank":1,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"TERRa (rus-Cyrl)":64.99} +{"Rank":2,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"TERRa (rus-Cyrl)":64.57} +{"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"TERRa (rus-Cyrl)":60.6} 
+{"Rank":4,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"TERRa (rus-Cyrl)":60.02} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"TERRa (rus-Cyrl)":59.39} +{"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"TERRa (rus-Cyrl)":59.38} +{"Rank":7,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"TERRa (rus-Cyrl)":59.12} +{"Rank":8,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"TERRa (rus-Cyrl)":58.56} +{"Rank":9,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"TERRa (rus-Cyrl)":58.4} +{"Rank":10,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"TERRa (rus-Cyrl)":57.81} +{"Rank":11,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"TERRa (rus-Cyrl)":56.09} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"TERRa (rus-Cyrl)":55.71} +{"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"TERRa (rus-Cyrl)":55.61} +{"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"TERRa (rus-Cyrl)":55.14} +{"Rank":15,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"TERRa (rus-Cyrl)":54.96} +{"Rank":16,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"TERRa (rus-Cyrl)":53.78} +{"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"TERRa (rus-Cyrl)":52.48} +{"Rank":18,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"TERRa (rus-Cyrl)":52.12} +{"Rank":19,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"TERRa (rus-Cyrl)":51.97} +{"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"TERRa (rus-Cyrl)":51.87} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"TERRa (rus-Cyrl)":51.06} +{"Rank":22,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"TERRa (rus-Cyrl)":50.17} +{"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"TERRa (rus-Cyrl)":46.4} +{"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"TERRa (rus-Cyrl)":45.03} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"TERRa (rus-Cyrl)":44.52} diff --git a/all_data_tasks/41/default.jsonl b/all_data_tasks/41/default.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..14e74d4f7124aa81c3b912bd0d4e8ab325e295c6 --- /dev/null +++ b/all_data_tasks/41/default.jsonl @@ -0,0 +1,25 @@ +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58} 
+{"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61} +{"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"RuBQReranking (rus-Cyrl)":74.02} +{"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"RuBQReranking (rus-Cyrl)":72.41} +{"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01} +{"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46} +{"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65} +{"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42} +{"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15} +{"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77} +{"Rank":12,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13} +{"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13} +{"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83} +{"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8} +{"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81} +{"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09} +{"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58} +{"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65} +{"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89} +{"Rank":21,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51} +{"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44} +{"Rank":23,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01} +{"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96} +{"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking 
(rus-Cyrl)":27.05} diff --git a/all_data_tasks/42/default.jsonl b/all_data_tasks/42/default.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..6b9538f6ec61f4d6f6b7e6481e66faa065c32b7d --- /dev/null +++ b/all_data_tasks/42/default.jsonl @@ -0,0 +1,25 @@ +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98} +{"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11} +{"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21} +{"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03} +{"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58} +{"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53} +{"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71} +{"Rank":8,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86} +{"Rank":9,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73} +{"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04} +{"Rank":11,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02} +{"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03} +{"Rank":14,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8} +{"Rank":15,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87} +{"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million 
Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63} +{"Rank":18,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52} +{"Rank":19,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15} +{"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24} +{"Rank":22,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64} +{"Rank":23,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"RiaNewsRetrieval (rus-Cyrl)":null,"RuBQRetrieval (rus-Cyrl)":70.94} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"RiaNewsRetrieval (rus-Cyrl)":null,"RuBQRetrieval (rus-Cyrl)":8.84} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"RiaNewsRetrieval (rus-Cyrl)":null,"RuBQRetrieval (rus-Cyrl)":4.75} diff --git a/all_data_tasks/43/default.jsonl b/all_data_tasks/43/default.jsonl new file mode 100644 index 0000000000000000000000000000000000000000..d65bb348a98b52f6c0ee3f05ee08e5797e02f1a0 --- /dev/null +++ b/all_data_tasks/43/default.jsonl @@ -0,0 +1,25 @@ +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13} +{"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35} +{"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26} +{"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15} +{"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87} +{"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77} +{"Rank":7,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48} +{"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS 
(rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64} +{"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08} +{"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46} +{"Rank":11,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":73.67,"RUParaPhraserSTS (rus-Cyrl)":71.08,"RuSTSBenchmarkSTS (rus-Cyrl)":76.26} +{"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55} +{"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32} +{"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34} +{"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22} +{"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03} +{"Rank":18,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82} +{"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72} +{"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16} +{"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95} +{"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33} +{"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS 
(rus-Cyrl)":55.68} diff --git a/all_data_tasks/5/default.jsonl b/all_data_tasks/5/default.jsonl index 93f45e8aea219946555d80d98d1665b4d9650662..93c77c9db4b7dcfccad8229ab83b869760b36098 100644 --- a/all_data_tasks/5/default.jsonl +++ b/all_data_tasks/5/default.jsonl @@ -1,206 +1,57 @@ -{"index":9,"Rank":1,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":85.79,"BIOSSES":89.7,"SICK-R":78.44,"STS12":86.46,"STS13":87.76,"STS14":86.6,"STS15":90.1,"STS16":86.39,"STS17 (en-en)":86.98,"STS22 (en)":76.89,"STSBenchmark":88.56} -{"index":51,"Rank":2,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.24,"BIOSSES":87.98,"SICK-R":83.22,"STS12":79.4,"STS13":89.58,"STS14":84.86,"STS15":89.9,"STS16":86.31,"STS17 (en-en)":91.19,"STS22 (en)":70.08,"STSBenchmark":89.91} -{"index":1,"Rank":3,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":85.07,"BIOSSES":89.46,"SICK-R":81.93,"STS12":77.59,"STS13":90.36,"STS14":85.25,"STS15":89.66,"STS16":87.34,"STS17 (en-en)":92.06,"STS22 (en)":68.02,"STSBenchmark":88.99} -{"index":96,"Rank":4,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":85.05,"BIOSSES":86.07,"SICK-R":82.92,"STS12":79.47,"STS13":89.15,"STS14":84.93,"STS15":90.74,"STS16":87.82,"STS17 (en-en)":92.02,"STS22 (en)":68.36,"STSBenchmark":89.0} -{"index":261,"Rank":5,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.04,"BIOSSES":89.15,"SICK-R":82.83,"STS12":78.65,"STS13":90.0,"STS14":84.97,"STS15":89.81,"STS16":86.71,"STS17 (en-en)":89.8,"STS22 (en)":69.67,"STSBenchmark":88.77} -{"index":197,"Rank":6,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.04,"BIOSSES":89.15,"SICK-R":82.83,"STS12":78.65,"STS13":90.0,"STS14":84.97,"STS15":89.81,"STS16":86.71,"STS17 (en-en)":89.8,"STS22 (en)":69.67,"STSBenchmark":88.77} -{"index":194,"Rank":7,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.0,"BIOSSES":88.41,"SICK-R":82.92,"STS12":78.76,"STS13":90.35,"STS14":85.46,"STS15":89.62,"STS16":86.58,"STS17 (en-en)":89.5,"STS22 (en)":69.34,"STSBenchmark":89.1} -{"index":133,"Rank":8,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.0,"BIOSSES":88.41,"SICK-R":82.92,"STS12":78.76,"STS13":90.35,"STS14":85.46,"STS15":89.62,"STS16":86.58,"STS17 (en-en)":89.5,"STS22 (en)":69.34,"STSBenchmark":89.1} -{"index":53,"Rank":9,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.99,"BIOSSES":88.38,"SICK-R":83.0,"STS12":79.22,"STS13":89.43,"STS14":84.79,"STS15":89.54,"STS16":86.69,"STS17 (en-en)":89.64,"STS22 (en)":70.26,"STSBenchmark":88.96} -{"index":58,"Rank":10,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":84.97,"BIOSSES":86.4,"SICK-R":84.31,"STS12":78.44,"STS13":88.27,"STS14":84.49,"STS15":90.28,"STS16":87.37,"STS17 (en-en)":92.68,"STS22 (en)":68.62,"STSBenchmark":88.81} -{"index":0,"Rank":11,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, 
fp32)":4.47,"Average":84.93,"BIOSSES":89.42,"SICK-R":81.67,"STS12":78.02,"STS13":90.1,"STS14":85.44,"STS15":89.64,"STS16":87.24,"STS17 (en-en)":90.46,"STS22 (en)":67.99,"STSBenchmark":89.33} -{"index":193,"Rank":12,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.9,"BIOSSES":88.06,"SICK-R":82.05,"STS12":78.77,"STS13":90.4,"STS14":85.45,"STS15":90.01,"STS16":87.42,"STS17 (en-en)":88.8,"STS22 (en)":68.8,"STSBenchmark":89.2} -{"index":219,"Rank":13,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.82,"BIOSSES":87.28,"SICK-R":82.33,"STS12":79.7,"STS13":89.21,"STS14":86.01,"STS15":89.7,"STS16":87.68,"STS17 (en-en)":88.03,"STS22 (en)":70.16,"STSBenchmark":88.15} -{"index":161,"Rank":14,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.78,"BIOSSES":86.96,"SICK-R":81.73,"STS12":82.57,"STS13":87.15,"STS14":84.97,"STS15":91.05,"STS16":87.31,"STS17 (en-en)":90.03,"STS22 (en)":67.63,"STSBenchmark":88.38} -{"index":156,"Rank":15,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":84.63,"BIOSSES":85.55,"SICK-R":82.64,"STS12":79.66,"STS13":88.43,"STS14":84.54,"STS15":90.43,"STS16":87.68,"STS17 (en-en)":91.75,"STS22 (en)":66.98,"STSBenchmark":88.6} -{"index":117,"Rank":16,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.59,"BIOSSES":89.17,"SICK-R":82.8,"STS12":77.13,"STS13":89.29,"STS14":83.83,"STS15":89.7,"STS16":86.43,"STS17 (en-en)":89.66,"STS22 (en)":69.61,"STSBenchmark":88.3} -{"index":111,"Rank":17,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.54,"BIOSSES":86.14,"SICK-R":82.62,"STS12":79.09,"STS13":89.62,"STS14":85.02,"STS15":89.51,"STS16":86.61,"STS17 (en-en)":88.99,"STS22 (en)":68.79,"STSBenchmark":89.06} -{"index":165,"Rank":18,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.54,"BIOSSES":86.14,"SICK-R":82.62,"STS12":79.09,"STS13":89.62,"STS14":85.02,"STS15":89.51,"STS16":86.61,"STS17 (en-en)":88.99,"STS22 (en)":68.79,"STSBenchmark":89.06} -{"index":108,"Rank":19,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.54,"BIOSSES":86.14,"SICK-R":82.62,"STS12":79.09,"STS13":89.62,"STS14":85.02,"STS15":89.51,"STS16":86.61,"STS17 (en-en)":88.99,"STS22 (en)":68.79,"STSBenchmark":89.06} -{"index":138,"Rank":20,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.51,"BIOSSES":83.11,"SICK-R":82.89,"STS12":80.09,"STS13":89.68,"STS14":85.07,"STS15":89.39,"STS16":87.15,"STS17 (en-en)":91.35,"STS22 (en)":68.1,"STSBenchmark":88.23} -{"index":6,"Rank":21,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.31,"BIOSSES":89.24,"SICK-R":83.16,"STS12":73.34,"STS13":88.49,"STS14":86.49,"STS15":91.13,"STS16":85.68,"STS17 (en-en)":90.06,"STS22 (en)":66.32,"STSBenchmark":89.22} -{"index":21,"Rank":22,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.24,"BIOSSES":86.47,"SICK-R":83.87,"STS12":78.14,"STS13":86.59,"STS14":82.83,"STS15":87.77,"STS16":87.04,"STS17 (en-en)":91.25,"STS22 (en)":70.07,"STSBenchmark":88.42} 
-{"index":139,"Rank":23,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.22,"BIOSSES":83.3,"SICK-R":82.21,"STS12":79.52,"STS13":89.19,"STS14":85.15,"STS15":89.1,"STS16":87.14,"STS17 (en-en)":90.97,"STS22 (en)":67.83,"STSBenchmark":87.74} -{"index":200,"Rank":24,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.18,"BIOSSES":88.37,"SICK-R":82.06,"STS12":78.83,"STS13":87.99,"STS14":83.5,"STS15":89.0,"STS16":86.45,"STS17 (en-en)":89.56,"STS22 (en)":68.15,"STSBenchmark":87.89} -{"index":64,"Rank":25,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":84.09,"BIOSSES":85.24,"SICK-R":83.7,"STS12":78.8,"STS13":86.37,"STS14":84.04,"STS15":88.99,"STS16":87.22,"STS17 (en-en)":90.19,"STS22 (en)":67.68,"STSBenchmark":88.65} -{"index":23,"Rank":26,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.88,"BIOSSES":85.74,"SICK-R":82.66,"STS12":77.71,"STS13":87.45,"STS14":83.48,"STS15":87.63,"STS16":86.7,"STS17 (en-en)":91.18,"STS22 (en)":69.02,"STSBenchmark":87.25} -{"index":170,"Rank":27,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.85,"BIOSSES":86.75,"SICK-R":82.33,"STS12":77.61,"STS13":87.95,"STS14":83.85,"STS15":88.47,"STS16":86.46,"STS17 (en-en)":88.7,"STS22 (en)":68.02,"STSBenchmark":88.33} -{"index":60,"Rank":28,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":83.73,"BIOSSES":82.13,"SICK-R":83.01,"STS12":78.85,"STS13":86.84,"STS14":84.04,"STS15":88.72,"STS16":86.79,"STS17 (en-en)":90.63,"STS22 (en)":67.55,"STSBenchmark":88.72} -{"index":62,"Rank":29,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":83.58,"BIOSSES":84.92,"SICK-R":83.94,"STS12":79.27,"STS13":84.83,"STS14":82.94,"STS15":88.09,"STS16":86.54,"STS17 (en-en)":89.58,"STS22 (en)":67.67,"STSBenchmark":88.05} -{"index":66,"Rank":30,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":83.58,"BIOSSES":85.88,"SICK-R":82.25,"STS12":78.28,"STS13":85.52,"STS14":82.49,"STS15":88.76,"STS16":87.11,"STS17 (en-en)":90.1,"STS22 (en)":68.25,"STSBenchmark":87.16} -{"index":115,"Rank":31,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.51,"BIOSSES":87.95,"SICK-R":81.29,"STS12":76.16,"STS13":87.85,"STS14":83.39,"STS15":89.43,"STS16":85.35,"STS17 (en-en)":88.59,"STS22 (en)":67.81,"STSBenchmark":87.32} -{"index":176,"Rank":32,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.49,"BIOSSES":83.05,"SICK-R":83.01,"STS12":81.62,"STS13":86.82,"STS14":83.56,"STS15":88.75,"STS16":86.03,"STS17 (en-en)":88.56,"STS22 (en)":65.62,"STSBenchmark":87.84} -{"index":253,"Rank":33,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.35,"BIOSSES":88.65,"SICK-R":79.81,"STS12":76.81,"STS13":88.11,"STS14":82.66,"STS15":88.93,"STS16":84.25,"STS17 (en-en)":88.47,"STS22 (en)":69.71,"STSBenchmark":86.07} -{"index":186,"Rank":34,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":83.34,"BIOSSES":85.81,"SICK-R":81.75,"STS12":78.51,"STS13":86.62,"STS14":83.06,"STS15":88.39,"STS16":86.82,"STS17 (en-en)":87.9,"STS22 (en)":66.76,"STSBenchmark":87.77} -{"index":43,"Rank":35,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.26,"BIOSSES":87.34,"SICK-R":80.56,"STS12":73.69,"STS13":85.82,"STS14":82.05,"STS15":88.8,"STS16":86.2,"STS17 (en-en)":91.46,"STS22 (en)":69.21,"STSBenchmark":87.43} -{"index":36,"Rank":36,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.15,"BIOSSES":85.01,"SICK-R":82.18,"STS12":77.62,"STS13":85.16,"STS14":80.02,"STS15":88.92,"STS16":86.92,"STS17 (en-en)":90.09,"STS22 (en)":66.81,"STSBenchmark":88.79} -{"index":148,"Rank":37,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":83.15,"BIOSSES":84.39,"SICK-R":81.27,"STS12":76.28,"STS13":88.18,"STS14":81.92,"STS15":89.01,"STS16":85.49,"STS17 (en-en)":90.3,"STS22 (en)":67.74,"STSBenchmark":86.88} -{"index":150,"Rank":38,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.11,"BIOSSES":84.65,"SICK-R":81.68,"STS12":79.05,"STS13":86.37,"STS14":82.78,"STS15":88.03,"STS16":86.49,"STS17 (en-en)":87.5,"STS22 (en)":67.05,"STSBenchmark":87.52} -{"index":22,"Rank":39,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":83.11,"BIOSSES":84.65,"SICK-R":81.68,"STS12":79.05,"STS13":86.37,"STS14":82.78,"STS15":88.03,"STS16":86.49,"STS17 (en-en)":87.5,"STS22 (en)":67.05,"STSBenchmark":87.52} -{"index":114,"Rank":40,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.11,"BIOSSES":84.65,"SICK-R":81.68,"STS12":79.05,"STS13":86.37,"STS14":82.78,"STS15":88.03,"STS16":86.49,"STS17 (en-en)":87.5,"STS22 (en)":67.05,"STSBenchmark":87.52} -{"index":149,"Rank":41,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":83.06,"BIOSSES":84.15,"SICK-R":81.7,"STS12":75.32,"STS13":87.44,"STS14":81.87,"STS15":88.94,"STS16":85.38,"STS17 (en-en)":90.54,"STS22 (en)":68.65,"STSBenchmark":86.56} -{"index":205,"Rank":42,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.04,"BIOSSES":81.37,"SICK-R":79.28,"STS12":79.55,"STS13":88.83,"STS14":83.87,"STS15":88.54,"STS16":86.49,"STS17 (en-en)":88.73,"STS22 (en)":66.88,"STSBenchmark":86.85} -{"index":126,"Rank":43,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.04,"BIOSSES":81.37,"SICK-R":79.28,"STS12":79.55,"STS13":88.83,"STS14":83.87,"STS15":88.54,"STS16":86.49,"STS17 (en-en)":88.73,"STS22 (en)":66.88,"STSBenchmark":86.85} -{"index":17,"Rank":44,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":83.04,"BIOSSES":81.37,"SICK-R":79.28,"STS12":79.55,"STS13":88.83,"STS14":83.87,"STS15":88.54,"STS16":86.49,"STS17 (en-en)":88.73,"STS22 (en)":66.88,"STSBenchmark":86.85} -{"index":118,"Rank":45,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.03,"BIOSSES":86.99,"SICK-R":80.53,"STS12":75.57,"STS13":86.26,"STS14":82.3,"STS15":88.74,"STS16":85.27,"STS17 
(en-en)":89.02,"STS22 (en)":68.51,"STSBenchmark":87.08} -{"index":137,"Rank":46,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.02,"BIOSSES":85.94,"SICK-R":81.06,"STS12":78.72,"STS13":84.88,"STS14":83.11,"STS15":88.74,"STS16":86.35,"STS17 (en-en)":87.71,"STS22 (en)":66.28,"STSBenchmark":87.45} -{"index":151,"Rank":47,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.02,"BIOSSES":85.94,"SICK-R":81.06,"STS12":78.72,"STS13":84.88,"STS14":83.11,"STS15":88.74,"STS16":86.35,"STS17 (en-en)":87.71,"STS22 (en)":66.28,"STSBenchmark":87.45} -{"index":169,"Rank":48,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.01,"BIOSSES":85.52,"SICK-R":81.41,"STS12":77.47,"STS13":86.38,"STS14":81.17,"STS15":88.23,"STS16":86.29,"STS17 (en-en)":90.62,"STS22 (en)":65.01,"STSBenchmark":88.02} -{"index":119,"Rank":49,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.0,"BIOSSES":87.2,"SICK-R":80.31,"STS12":75.76,"STS13":86.08,"STS14":82.28,"STS15":88.9,"STS16":85.18,"STS17 (en-en)":88.73,"STS22 (en)":68.54,"STSBenchmark":86.98} -{"index":125,"Rank":50,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.0,"BIOSSES":87.2,"SICK-R":80.31,"STS12":75.76,"STS13":86.08,"STS14":82.28,"STS15":88.9,"STS16":85.18,"STS17 (en-en)":88.73,"STS22 (en)":68.54,"STSBenchmark":86.98} -{"index":8,"Rank":51,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.93,"BIOSSES":84.85,"SICK-R":79.71,"STS12":77.09,"STS13":88.91,"STS14":82.08,"STS15":89.21,"STS16":84.74,"STS17 (en-en)":90.73,"STS22 (en)":62.1,"STSBenchmark":89.86} -{"index":268,"Rank":52,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.88,"BIOSSES":87.98,"SICK-R":79.13,"STS12":75.98,"STS13":87.55,"STS14":81.99,"STS15":88.93,"STS16":83.59,"STS17 (en-en)":88.51,"STS22 (en)":69.72,"STSBenchmark":85.4} -{"index":215,"Rank":53,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":82.84,"BIOSSES":85.59,"SICK-R":82.8,"STS12":76.22,"STS13":86.3,"STS14":82.09,"STS15":87.24,"STS16":84.77,"STS17 (en-en)":87.42,"STS22 (en)":69.85,"STSBenchmark":86.14} -{"index":198,"Rank":54,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.79,"BIOSSES":86.71,"SICK-R":80.06,"STS12":77.75,"STS13":86.08,"STS14":82.89,"STS15":88.54,"STS16":85.76,"STS17 (en-en)":87.82,"STS22 (en)":65.46,"STSBenchmark":86.79} -{"index":204,"Rank":55,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.73,"BIOSSES":82.11,"SICK-R":80.49,"STS12":77.25,"STS13":87.79,"STS14":82.91,"STS15":88.45,"STS16":85.45,"STS17 (en-en)":90.33,"STS22 (en)":66.1,"STSBenchmark":86.38} -{"index":16,"Rank":56,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.73,"BIOSSES":82.11,"SICK-R":80.49,"STS12":77.25,"STS13":87.79,"STS14":82.91,"STS15":88.45,"STS16":85.45,"STS17 (en-en)":90.33,"STS22 (en)":66.1,"STSBenchmark":86.38} -{"index":246,"Rank":57,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory 
Usage (GB, fp32)":18.12,"Average":82.63,"BIOSSES":80.43,"SICK-R":80.47,"STS12":78.85,"STS13":88.94,"STS14":84.86,"STS15":89.32,"STS16":84.67,"STS17 (en-en)":89.46,"STS22 (en)":65.33,"STSBenchmark":84.01} -{"index":34,"Rank":58,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.62,"BIOSSES":83.5,"SICK-R":81.27,"STS12":74.37,"STS13":85.2,"STS14":80.98,"STS15":89.23,"STS16":86.32,"STS17 (en-en)":90.34,"STS22 (en)":66.42,"STSBenchmark":88.55} -{"index":174,"Rank":59,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.61,"BIOSSES":76.91,"SICK-R":80.33,"STS12":80.59,"STS13":86.59,"STS14":82.99,"STS15":88.79,"STS16":84.44,"STS17 (en-en)":89.31,"STS22 (en)":67.97,"STSBenchmark":88.14} -{"index":178,"Rank":60,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.56,"BIOSSES":86.54,"SICK-R":83.23,"STS12":76.13,"STS13":83.19,"STS14":80.6,"STS15":87.16,"STS16":85.16,"STS17 (en-en)":90.88,"STS22 (en)":67.04,"STSBenchmark":85.67} -{"index":201,"Rank":61,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.5,"BIOSSES":85.64,"SICK-R":80.74,"STS12":76.07,"STS13":86.81,"STS14":81.68,"STS15":88.75,"STS16":84.61,"STS17 (en-en)":89.75,"STS22 (en)":64.1,"STSBenchmark":86.88} -{"index":15,"Rank":62,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":82.42,"BIOSSES":81.12,"SICK-R":79.15,"STS12":76.52,"STS13":88.63,"STS14":83.32,"STS15":87.5,"STS16":86.39,"STS17 (en-en)":87.79,"STS22 (en)":66.4,"STSBenchmark":87.35} -{"index":179,"Rank":63,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"index":20,"Rank":64,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"index":180,"Rank":65,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"index":120,"Rank":66,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"index":182,"Rank":67,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"index":181,"Rank":68,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"index":167,"Rank":69,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.37,"BIOSSES":84.82,"SICK-R":80.85,"STS12":76.1,"STS13":85.49,"STS14":80.44,"STS15":87.75,"STS16":85.42,"STS17 (en-en)":89.95,"STS22 (en)":65.13,"STSBenchmark":87.71} -{"index":252,"Rank":70,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.3,"BIOSSES":87.64,"SICK-R":78.86,"STS12":75.71,"STS13":85.73,"STS14":81.51,"STS15":88.81,"STS16":83.82,"STS17 (en-en)":87.9,"STS22 (en)":67.33,"STSBenchmark":85.73} -{"index":147,"Rank":71,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":82.29,"BIOSSES":82.31,"SICK-R":80.26,"STS12":77.02,"STS13":86.58,"STS14":81.32,"STS15":88.19,"STS16":84.88,"STS17 (en-en)":89.46,"STS22 (en)":66.45,"STSBenchmark":86.43} -{"index":254,"Rank":72,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.07,"BIOSSES":88.17,"SICK-R":77.93,"STS12":75.12,"STS13":85.09,"STS14":81.03,"STS15":88.32,"STS16":83.91,"STS17 (en-en)":87.59,"STS22 (en)":68.0,"STSBenchmark":85.57} -{"index":28,"Rank":73,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"index":26,"Rank":74,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"index":206,"Rank":75,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"index":129,"Rank":76,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"index":29,"Rank":77,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"index":27,"Rank":78,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"index":154,"Rank":79,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":82.06,"BIOSSES":84.73,"SICK-R":80.49,"STS12":75.93,"STS13":85.22,"STS14":80.54,"STS15":88.81,"STS16":85.28,"STS17 (en-en)":89.37,"STS22 (en)":62.99,"STSBenchmark":87.21} -{"index":93,"Rank":80,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.05,"BIOSSES":83.6,"SICK-R":79.28,"STS12":76.95,"STS13":84.12,"STS14":80.46,"STS15":89.76,"STS16":85.47,"STS17 (en-en)":89.03,"STS22 (en)":64.11,"STSBenchmark":87.74} -{"index":155,"Rank":81,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":82.05,"BIOSSES":83.6,"SICK-R":79.28,"STS12":76.95,"STS13":84.12,"STS14":80.46,"STS15":89.76,"STS16":85.47,"STS17 (en-en)":89.03,"STS22 (en)":64.11,"STSBenchmark":87.74} -{"index":202,"Rank":82,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.04,"BIOSSES":86.66,"SICK-R":79.44,"STS12":76.17,"STS13":84.15,"STS14":81.49,"STS15":88.11,"STS16":84.99,"STS17 (en-en)":87.98,"STS22 (en)":65.07,"STSBenchmark":86.31} -{"index":18,"Rank":83,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.97,"BIOSSES":83.65,"SICK-R":79.37,"STS12":74.38,"STS13":84.71,"STS14":80.1,"STS15":87.16,"STS16":85.02,"STS17 (en-en)":90.64,"STS22 (en)":68.63,"STSBenchmark":86.04} -{"index":213,"Rank":84,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.94,"BIOSSES":84.25,"SICK-R":79.38,"STS12":78.52,"STS13":86.05,"STS14":81.54,"STS15":86.97,"STS16":84.77,"STS17 (en-en)":87.47,"STS22 (en)":65.02,"STSBenchmark":85.47} -{"index":269,"Rank":85,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.9,"BIOSSES":85.83,"SICK-R":79.0,"STS12":73.48,"STS13":86.14,"STS14":80.36,"STS15":88.53,"STS16":84.08,"STS17 (en-en)":89.42,"STS22 (en)":66.75,"STSBenchmark":85.45} -{"index":244,"Rank":86,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":81.83,"BIOSSES":78.93,"SICK-R":80.34,"STS12":79.11,"STS13":87.33,"STS14":83.17,"STS15":88.28,"STS16":84.36,"STS17 (en-en)":88.99,"STS22 (en)":62.39,"STSBenchmark":85.36} -{"index":283,"Rank":87,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.73,"BIOSSES":84.68,"SICK-R":79.0,"STS12":72.84,"STS13":86.1,"STS14":81.15,"STS15":88.49,"STS16":85.08,"STS17 (en-en)":90.22,"STS22 (en)":66.14,"STSBenchmark":83.56} -{"index":211,"Rank":88,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":81.7,"BIOSSES":83.3,"SICK-R":79.27,"STS12":78.3,"STS13":85.81,"STS14":81.38,"STS15":86.79,"STS16":84.56,"STS17 (en-en)":87.25,"STS22 (en)":65.24,"STSBenchmark":85.14} -{"index":245,"Rank":89,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":81.66,"BIOSSES":73.12,"SICK-R":79.98,"STS12":79.02,"STS13":88.8,"STS14":84.33,"STS15":88.89,"STS16":85.31,"STS17 (en-en)":88.91,"STS22 (en)":64.32,"STSBenchmark":83.93} -{"index":24,"Rank":90,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":81.59,"BIOSSES":83.75,"SICK-R":79.41,"STS12":77.44,"STS13":82.98,"STS14":81.84,"STS15":87.26,"STS16":84.93,"STS17 (en-en)":87.15,"STS22 (en)":65.3,"STSBenchmark":85.86} -{"index":284,"Rank":91,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.58,"BIOSSES":88.72,"SICK-R":76.73,"STS12":73.09,"STS13":84.92,"STS14":79.81,"STS15":88.01,"STS16":84.41,"STS17 (en-en)":90.94,"STS22 
(en)":64.96,"STSBenchmark":84.24} -{"index":160,"Rank":92,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":81.56,"BIOSSES":82.51,"SICK-R":80.23,"STS12":80.02,"STS13":81.55,"STS14":77.72,"STS15":89.31,"STS16":85.78,"STS17 (en-en)":88.12,"STS22 (en)":63.06,"STSBenchmark":87.29} -{"index":112,"Rank":93,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.54,"BIOSSES":87.13,"SICK-R":76.49,"STS12":75.0,"STS13":87.91,"STS14":82.26,"STS15":87.87,"STS16":80.92,"STS17 (en-en)":87.23,"STS22 (en)":68.59,"STSBenchmark":81.95} -{"index":166,"Rank":94,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.52,"BIOSSES":84.65,"SICK-R":79.92,"STS12":75.8,"STS13":83.62,"STS14":78.73,"STS15":86.41,"STS16":84.5,"STS17 (en-en)":89.95,"STS22 (en)":65.81,"STSBenchmark":85.79} -{"index":270,"Rank":95,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.5,"BIOSSES":87.81,"SICK-R":77.31,"STS12":73.99,"STS13":84.23,"STS14":80.09,"STS15":88.2,"STS16":83.61,"STS17 (en-en)":87.8,"STS22 (en)":66.84,"STSBenchmark":85.13} -{"index":19,"Rank":96,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.43,"BIOSSES":85.39,"SICK-R":79.78,"STS12":73.88,"STS13":85.08,"STS14":79.61,"STS15":86.15,"STS16":81.6,"STS17 (en-en)":89.11,"STS22 (en)":70.59,"STSBenchmark":83.07} -{"index":210,"Rank":97,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":81.34,"BIOSSES":81.58,"SICK-R":79.24,"STS12":78.16,"STS13":86.01,"STS14":81.25,"STS15":86.51,"STS16":84.24,"STS17 (en-en)":86.44,"STS22 (en)":65.14,"STSBenchmark":84.8} -{"index":95,"Rank":98,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.26,"BIOSSES":87.6,"SICK-R":77.01,"STS12":75.67,"STS13":82.4,"STS14":79.93,"STS15":85.82,"STS16":84.5,"STS17 (en-en)":88.93,"STS22 (en)":67.1,"STSBenchmark":83.6} -{"index":199,"Rank":99,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.19,"BIOSSES":83.94,"SICK-R":78.64,"STS12":74.88,"STS13":83.64,"STS14":80.4,"STS15":88.01,"STS16":84.31,"STS17 (en-en)":88.3,"STS22 (en)":63.84,"STSBenchmark":85.93} -{"index":243,"Rank":100,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.14,"BIOSSES":75.89,"SICK-R":80.18,"STS12":78.05,"STS13":85.85,"STS14":82.19,"STS15":87.46,"STS16":84.03,"STS17 (en-en)":89.57,"STS22 (en)":62.66,"STSBenchmark":85.52} -{"index":140,"Rank":101,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.05,"BIOSSES":81.4,"SICK-R":78.3,"STS12":75.79,"STS13":83.58,"STS14":79.95,"STS15":88.82,"STS16":84.46,"STS17 (en-en)":87.58,"STS22 (en)":64.07,"STSBenchmark":86.52} -{"index":153,"Rank":102,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.05,"BIOSSES":81.4,"SICK-R":78.3,"STS12":75.79,"STS13":83.58,"STS14":79.95,"STS15":88.82,"STS16":84.46,"STS17 (en-en)":87.58,"STS22 (en)":64.07,"STSBenchmark":86.52} -{"index":282,"Rank":103,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":81.04,"BIOSSES":84.87,"SICK-R":79.18,"STS12":71.98,"STS13":85.52,"STS14":80.5,"STS15":87.51,"STS16":84.48,"STS17 (en-en)":88.11,"STS22 (en)":65.92,"STSBenchmark":82.34} -{"index":281,"Rank":104,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.97,"BIOSSES":86.35,"SICK-R":80.6,"STS12":69.8,"STS13":83.27,"STS14":76.09,"STS15":86.12,"STS16":85.96,"STS17 (en-en)":90.25,"STS22 (en)":68.12,"STSBenchmark":83.17} -{"index":152,"Rank":105,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":80.96,"BIOSSES":85.1,"SICK-R":79.66,"STS12":74.22,"STS13":83.31,"STS14":78.52,"STS15":88.35,"STS16":84.15,"STS17 (en-en)":87.23,"STS22 (en)":62.88,"STSBenchmark":86.18} -{"index":172,"Rank":106,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.96,"BIOSSES":84.43,"SICK-R":79.2,"STS12":74.52,"STS13":83.16,"STS14":78.09,"STS15":86.91,"STS16":83.65,"STS17 (en-en)":90.16,"STS22 (en)":64.88,"STSBenchmark":84.6} -{"index":33,"Rank":107,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.92,"BIOSSES":81.72,"SICK-R":79.65,"STS12":72.6,"STS13":82.88,"STS14":79.74,"STS15":86.98,"STS16":84.41,"STS17 (en-en)":89.22,"STS22 (en)":65.42,"STSBenchmark":86.58} -{"index":157,"Rank":108,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":80.87,"BIOSSES":84.22,"SICK-R":78.9,"STS12":75.19,"STS13":81.8,"STS14":78.48,"STS15":87.49,"STS16":84.58,"STS17 (en-en)":87.94,"STS22 (en)":63.76,"STSBenchmark":86.36} -{"index":208,"Rank":109,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.84,"BIOSSES":87.19,"SICK-R":74.19,"STS12":73.22,"STS13":84.77,"STS14":79.99,"STS15":87.27,"STS16":82.31,"STS17 (en-en)":89.54,"STS22 (en)":65.93,"STSBenchmark":83.96} -{"index":209,"Rank":110,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":80.74,"BIOSSES":80.19,"SICK-R":79.09,"STS12":77.49,"STS13":85.62,"STS14":80.5,"STS15":85.84,"STS16":83.9,"STS17 (en-en)":86.27,"STS22 (en)":64.24,"STSBenchmark":84.28} -{"index":116,"Rank":111,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.72,"BIOSSES":81.26,"SICK-R":79.09,"STS12":75.04,"STS13":83.26,"STS14":78.62,"STS15":87.03,"STS16":83.01,"STS17 (en-en)":87.36,"STS22 (en)":68.11,"STSBenchmark":84.4} -{"index":135,"Rank":112,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.7,"BIOSSES":81.23,"SICK-R":79.65,"STS12":74.27,"STS13":84.18,"STS14":78.81,"STS15":87.55,"STS16":85.35,"STS17 (en-en)":88.88,"STS22 (en)":62.2,"STSBenchmark":84.84} -{"index":175,"Rank":113,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":80.7,"BIOSSES":81.23,"SICK-R":79.65,"STS12":74.27,"STS13":84.18,"STS14":78.81,"STS15":87.55,"STS16":85.35,"STS17 (en-en)":88.88,"STS22 (en)":62.2,"STSBenchmark":84.84} -{"index":267,"Rank":114,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.59,"BIOSSES":82.19,"SICK-R":79.25,"STS12":73.71,"STS13":82.32,"STS14":80.07,"STS15":87.21,"STS16":82.33,"STS17 (en-en)":87.58,"STS22 
(en)":66.3,"STSBenchmark":84.92} -{"index":84,"Rank":115,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.53,"BIOSSES":79.5,"SICK-R":79.59,"STS12":74.29,"STS13":85.35,"STS14":79.21,"STS15":85.52,"STS16":82.54,"STS17 (en-en)":90.44,"STS22 (en)":63.2,"STSBenchmark":85.67} -{"index":185,"Rank":116,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.47,"BIOSSES":80.72,"SICK-R":81.87,"STS12":70.75,"STS13":84.37,"STS14":74.51,"STS15":86.41,"STS16":84.74,"STS17 (en-en)":89.03,"STS22 (en)":65.87,"STSBenchmark":86.46} -{"index":107,"Rank":117,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.46,"BIOSSES":86.63,"SICK-R":75.85,"STS12":72.58,"STS13":82.39,"STS14":77.98,"STS15":86.54,"STS16":83.31,"STS17 (en-en)":88.28,"STS22 (en)":66.68,"STSBenchmark":84.38} -{"index":158,"Rank":118,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.39,"BIOSSES":79.43,"SICK-R":78.51,"STS12":76.21,"STS13":82.4,"STS14":79.0,"STS15":87.76,"STS16":83.8,"STS17 (en-en)":87.72,"STS22 (en)":63.15,"STSBenchmark":85.95} -{"index":230,"Rank":119,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":80.32,"BIOSSES":80.43,"SICK-R":80.59,"STS12":72.63,"STS13":83.48,"STS14":78.0,"STS15":85.66,"STS16":80.03,"STS17 (en-en)":90.6,"STS22 (en)":68.39,"STSBenchmark":83.42} -{"index":159,"Rank":120,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.26,"BIOSSES":85.06,"SICK-R":78.51,"STS12":76.7,"STS13":78.03,"STS14":76.6,"STS15":88.16,"STS16":84.28,"STS17 (en-en)":87.83,"STS22 (en)":61.83,"STSBenchmark":85.64} -{"index":248,"Rank":121,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.24,"BIOSSES":66.16,"SICK-R":79.97,"STS12":80.91,"STS13":82.86,"STS14":87.36,"STS15":88.31,"STS16":81.61,"STS17 (en-en)":85.81,"STS22 (en)":62.99,"STSBenchmark":86.45} -{"index":207,"Rank":122,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.22,"BIOSSES":85.37,"SICK-R":77.3,"STS12":77.97,"STS13":79.39,"STS14":78.17,"STS15":85.51,"STS16":84.95,"STS17 (en-en)":85.87,"STS22 (en)":64.28,"STSBenchmark":83.42} -{"index":132,"Rank":123,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.17,"BIOSSES":75.32,"SICK-R":80.91,"STS12":76.98,"STS13":82.63,"STS14":77.84,"STS15":85.92,"STS16":80.68,"STS17 (en-en)":88.99,"STS22 (en)":68.26,"STSBenchmark":84.2} -{"index":35,"Rank":124,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.09,"BIOSSES":84.01,"SICK-R":77.04,"STS12":73.22,"STS13":79.5,"STS14":76.96,"STS15":86.43,"STS16":84.33,"STS17 (en-en)":88.67,"STS22 (en)":65.31,"STSBenchmark":85.46} -{"index":136,"Rank":125,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.0,"BIOSSES":80.52,"SICK-R":76.72,"STS12":73.66,"STS13":83.3,"STS14":79.17,"STS15":87.3,"STS16":83.6,"STS17 (en-en)":88.23,"STS22 (en)":63.46,"STSBenchmark":84.04} -{"index":177,"Rank":126,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":80.0,"BIOSSES":80.52,"SICK-R":76.72,"STS12":73.66,"STS13":83.3,"STS14":79.17,"STS15":87.3,"STS16":83.6,"STS17 (en-en)":88.23,"STS22 (en)":63.46,"STSBenchmark":84.04} -{"index":168,"Rank":127,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.93,"BIOSSES":84.44,"SICK-R":78.53,"STS12":74.07,"STS13":81.81,"STS14":77.05,"STS15":84.61,"STS16":83.09,"STS17 (en-en)":89.15,"STS22 (en)":62.92,"STSBenchmark":83.63} -{"index":171,"Rank":128,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.93,"BIOSSES":83.58,"SICK-R":79.14,"STS12":75.06,"STS13":80.86,"STS14":76.13,"STS15":85.55,"STS16":81.21,"STS17 (en-en)":88.98,"STS22 (en)":66.22,"STSBenchmark":82.57} -{"index":69,"Rank":129,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.89,"BIOSSES":87.32,"SICK-R":75.63,"STS12":68.58,"STS13":80.54,"STS14":77.63,"STS15":86.16,"STS16":82.82,"STS17 (en-en)":88.57,"STS22 (en)":67.39,"STSBenchmark":84.25} -{"index":228,"Rank":130,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":79.84,"BIOSSES":83.57,"SICK-R":79.32,"STS12":73.08,"STS13":82.13,"STS14":76.73,"STS15":85.58,"STS16":80.23,"STS17 (en-en)":88.63,"STS22 (en)":66.0,"STSBenchmark":83.09} -{"index":212,"Rank":131,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":79.79,"BIOSSES":77.18,"SICK-R":78.76,"STS12":77.3,"STS13":84.18,"STS14":79.37,"STS15":84.69,"STS16":83.36,"STS17 (en-en)":85.73,"STS22 (en)":63.83,"STSBenchmark":83.46} -{"index":271,"Rank":132,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.66,"BIOSSES":70.56,"SICK-R":79.04,"STS12":78.39,"STS13":83.72,"STS14":79.19,"STS15":85.58,"STS16":82.33,"STS17 (en-en)":87.9,"STS22 (en)":63.92,"STSBenchmark":85.99} -{"index":42,"Rank":133,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":79.54,"BIOSSES":85.01,"SICK-R":81.47,"STS12":65.84,"STS13":78.37,"STS14":77.52,"STS15":85.43,"STS16":79.94,"STS17 (en-en)":90.12,"STS22 (en)":68.59,"STSBenchmark":83.1} -{"index":68,"Rank":134,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.18,"BIOSSES":85.05,"SICK-R":76.04,"STS12":69.5,"STS13":80.96,"STS14":77.08,"STS15":85.42,"STS16":82.3,"STS17 (en-en)":88.03,"STS22 (en)":64.12,"STSBenchmark":83.34} -{"index":217,"Rank":135,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":79.12,"BIOSSES":68.38,"SICK-R":80.77,"STS12":75.3,"STS13":84.67,"STS14":80.19,"STS15":85.4,"STS16":80.82,"STS17 (en-en)":89.44,"STS22 (en)":61.96,"STSBenchmark":84.25} -{"index":183,"Rank":136,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.11,"BIOSSES":83.86,"SICK-R":79.38,"STS12":73.65,"STS13":75.57,"STS14":72.1,"STS15":85.63,"STS16":82.07,"STS17 (en-en)":89.0,"STS22 (en)":66.16,"STSBenchmark":83.69} -{"index":162,"Rank":137,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":79.1,"BIOSSES":82.26,"SICK-R":77.51,"STS12":76.56,"STS13":76.97,"STS14":75.52,"STS15":87.12,"STS16":83.63,"STS17 (en-en)":86.44,"STS22 (en)":60.94,"STSBenchmark":84.01} 
-{"index":184,"Rank":138,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.02,"BIOSSES":80.25,"SICK-R":79.68,"STS12":69.94,"STS13":79.63,"STS14":71.34,"STS15":82.81,"STS16":82.65,"STS17 (en-en)":88.89,"STS22 (en)":68.48,"STSBenchmark":86.5} -{"index":229,"Rank":139,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":78.9,"BIOSSES":81.64,"SICK-R":77.58,"STS12":72.37,"STS13":80.6,"STS14":75.59,"STS15":85.39,"STS16":78.99,"STS17 (en-en)":87.59,"STS22 (en)":67.21,"STSBenchmark":82.03} -{"index":72,"Rank":140,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.81,"BIOSSES":80.78,"SICK-R":78.24,"STS12":75.19,"STS13":79.33,"STS14":76.56,"STS15":84.7,"STS16":81.44,"STS17 (en-en)":86.61,"STS22 (en)":63.43,"STSBenchmark":81.79} -{"index":106,"Rank":141,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.65,"BIOSSES":82.92,"SICK-R":73.62,"STS12":71.89,"STS13":79.85,"STS14":76.86,"STS15":84.77,"STS16":81.91,"STS17 (en-en)":86.82,"STS22 (en)":65.38,"STSBenchmark":82.5} -{"index":277,"Rank":142,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.6,"BIOSSES":78.04,"SICK-R":77.48,"STS12":72.3,"STS13":81.49,"STS14":74.74,"STS15":84.28,"STS16":82.06,"STS17 (en-en)":87.08,"STS22 (en)":64.71,"STSBenchmark":83.78} -{"index":113,"Rank":143,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.59,"BIOSSES":81.6,"SICK-R":74.18,"STS12":72.2,"STS13":80.54,"STS14":76.2,"STS15":85.2,"STS16":81.93,"STS17 (en-en)":86.61,"STS22 (en)":65.46,"STSBenchmark":81.94} -{"index":203,"Rank":144,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.53,"BIOSSES":78.95,"SICK-R":74.39,"STS12":72.22,"STS13":81.0,"STS14":76.99,"STS15":85.21,"STS16":82.83,"STS17 (en-en)":87.78,"STS22 (en)":64.17,"STSBenchmark":81.77} -{"index":65,"Rank":145,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":78.5,"BIOSSES":83.29,"SICK-R":75.55,"STS12":67.65,"STS13":83.9,"STS14":76.97,"STS15":83.8,"STS16":81.91,"STS17 (en-en)":85.58,"STS22 (en)":65.93,"STSBenchmark":80.42} -{"index":103,"Rank":146,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.44,"BIOSSES":86.27,"SICK-R":69.66,"STS12":68.79,"STS13":79.62,"STS14":75.58,"STS15":84.64,"STS16":82.4,"STS17 (en-en)":86.73,"STS22 (en)":69.49,"STSBenchmark":81.19} -{"index":238,"Rank":147,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.38,"BIOSSES":81.91,"SICK-R":74.29,"STS12":70.12,"STS13":82.72,"STS14":78.24,"STS15":86.26,"STS16":81.61,"STS17 (en-en)":85.18,"STS22 (en)":65.76,"STSBenchmark":77.73} -{"index":105,"Rank":148,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.37,"BIOSSES":83.42,"SICK-R":72.39,"STS12":71.9,"STS13":80.93,"STS14":76.6,"STS15":84.92,"STS16":80.72,"STS17 (en-en)":85.61,"STS22 (en)":65.9,"STSBenchmark":81.32} -{"index":236,"Rank":149,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":78.19,"BIOSSES":84.86,"SICK-R":73.39,"STS12":70.33,"STS13":82.19,"STS14":77.16,"STS15":86.31,"STS16":81.85,"STS17 (en-en)":83.93,"STS22 (en)":64.3,"STSBenchmark":77.6} -{"index":83,"Rank":150,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.1,"BIOSSES":86.25,"SICK-R":69.63,"STS12":67.5,"STS13":79.16,"STS14":74.46,"STS15":84.47,"STS16":80.96,"STS17 (en-en)":87.78,"STS22 (en)":69.35,"STSBenchmark":81.39} -{"index":173,"Rank":151,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.06,"BIOSSES":82.96,"SICK-R":76.33,"STS12":74.28,"STS13":78.55,"STS14":73.84,"STS15":83.71,"STS16":80.03,"STS17 (en-en)":87.49,"STS22 (en)":64.25,"STSBenchmark":79.2} -{"index":237,"Rank":152,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.8,"BIOSSES":78.94,"SICK-R":73.63,"STS12":69.11,"STS13":81.82,"STS14":77.07,"STS15":86.01,"STS16":82.23,"STS17 (en-en)":84.9,"STS22 (en)":66.61,"STSBenchmark":77.65} -{"index":123,"Rank":153,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.74,"BIOSSES":85.31,"SICK-R":69.82,"STS12":69.66,"STS13":79.67,"STS14":74.61,"STS15":83.81,"STS16":80.4,"STS17 (en-en)":87.07,"STS22 (en)":66.13,"STSBenchmark":80.9} -{"index":101,"Rank":154,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.08,"BIOSSES":88.24,"SICK-R":70.91,"STS12":66.11,"STS13":79.82,"STS14":73.64,"STS15":83.23,"STS16":81.58,"STS17 (en-en)":80.59,"STS22 (en)":68.79,"STSBenchmark":77.9} -{"index":235,"Rank":155,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":77.07,"BIOSSES":79.0,"SICK-R":71.45,"STS12":68.59,"STS13":79.09,"STS14":74.64,"STS15":84.85,"STS16":81.57,"STS17 (en-en)":85.8,"STS22 (en)":66.17,"STSBenchmark":79.58} -{"index":285,"Rank":156,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.95,"BIOSSES":83.79,"SICK-R":68.78,"STS12":64.81,"STS13":80.1,"STS14":74.96,"STS15":83.7,"STS16":80.55,"STS17 (en-en)":85.74,"STS22 (en)":67.5,"STSBenchmark":79.54} -{"index":82,"Rank":157,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.83,"BIOSSES":84.84,"SICK-R":68.2,"STS12":66.99,"STS13":77.58,"STS14":72.78,"STS15":82.62,"STS16":80.1,"STS17 (en-en)":87.25,"STS22 (en)":68.75,"STSBenchmark":79.21} -{"index":77,"Rank":158,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.56,"BIOSSES":80.94,"SICK-R":69.07,"STS12":71.78,"STS13":77.7,"STS14":74.04,"STS15":83.13,"STS16":78.88,"STS17 (en-en)":85.55,"STS22 (en)":65.55,"STSBenchmark":78.93} -{"index":214,"Rank":159,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.51,"BIOSSES":83.32,"SICK-R":70.2,"STS12":64.34,"STS13":80.03,"STS14":74.51,"STS15":83.3,"STS16":79.67,"STS17 (en-en)":86.32,"STS22 (en)":64.64,"STSBenchmark":78.81} -{"index":239,"Rank":160,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.47,"BIOSSES":77.32,"SICK-R":72.0,"STS12":68.19,"STS13":80.4,"STS14":74.02,"STS15":82.57,"STS16":79.78,"STS17 (en-en)":85.94,"STS22 
(en)":67.54,"STSBenchmark":76.97} -{"index":61,"Rank":161,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":76.4,"BIOSSES":82.41,"SICK-R":71.77,"STS12":65.39,"STS13":79.26,"STS14":72.98,"STS15":82.72,"STS16":81.02,"STS17 (en-en)":86.7,"STS22 (en)":63.47,"STSBenchmark":78.32} -{"index":104,"Rank":162,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.39,"BIOSSES":84.05,"SICK-R":69.26,"STS12":65.9,"STS13":77.87,"STS14":72.82,"STS15":83.49,"STS16":80.58,"STS17 (en-en)":84.49,"STS22 (en)":66.28,"STSBenchmark":79.18} -{"index":70,"Rank":163,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.9,"BIOSSES":71.78,"SICK-R":77.34,"STS12":70.15,"STS13":78.42,"STS14":74.76,"STS15":82.0,"STS16":78.27,"STS17 (en-en)":85.85,"STS22 (en)":61.2,"STSBenchmark":79.21} -{"index":63,"Rank":164,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":75.86,"BIOSSES":84.67,"SICK-R":72.16,"STS12":61.6,"STS13":79.71,"STS14":72.11,"STS15":82.18,"STS16":79.41,"STS17 (en-en)":85.44,"STS22 (en)":63.9,"STSBenchmark":77.44} -{"index":79,"Rank":165,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.74,"BIOSSES":83.02,"SICK-R":67.23,"STS12":66.59,"STS13":77.33,"STS14":71.83,"STS15":80.66,"STS16":78.91,"STS17 (en-en)":86.99,"STS22 (en)":67.3,"STSBenchmark":77.59} -{"index":234,"Rank":166,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":75.6,"BIOSSES":78.34,"SICK-R":75.25,"STS12":72.96,"STS13":70.58,"STS14":70.29,"STS15":81.94,"STS16":76.8,"STS17 (en-en)":86.19,"STS22 (en)":62.88,"STSBenchmark":80.75} -{"index":121,"Rank":167,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.48,"BIOSSES":86.35,"SICK-R":69.32,"STS12":67.85,"STS13":77.49,"STS14":69.77,"STS15":80.16,"STS16":77.94,"STS17 (en-en)":82.28,"STS22 (en)":67.97,"STSBenchmark":75.68} -{"index":99,"Rank":168,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.48,"BIOSSES":86.35,"SICK-R":69.32,"STS12":67.85,"STS13":77.49,"STS14":69.77,"STS15":80.16,"STS16":77.94,"STS17 (en-en)":82.28,"STS22 (en)":67.97,"STSBenchmark":75.68} -{"index":259,"Rank":169,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.39,"BIOSSES":78.19,"SICK-R":74.43,"STS12":72.58,"STS13":72.22,"STS14":69.98,"STS15":82.22,"STS16":76.91,"STS17 (en-en)":85.22,"STS22 (en)":61.9,"STSBenchmark":80.28} -{"index":78,"Rank":170,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.37,"BIOSSES":76.49,"SICK-R":71.7,"STS12":69.73,"STS13":76.43,"STS14":73.66,"STS15":82.62,"STS16":79.49,"STS17 (en-en)":84.38,"STS22 (en)":60.61,"STSBenchmark":78.61} -{"index":260,"Rank":171,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.35,"BIOSSES":79.14,"SICK-R":76.43,"STS12":74.25,"STS13":71.82,"STS14":71.38,"STS15":82.47,"STS16":77.54,"STS17 (en-en)":86.38,"STS22 (en)":52.85,"STSBenchmark":81.24} 
-{"index":59,"Rank":172,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.11,"BIOSSES":65.29,"SICK-R":76.01,"STS12":71.25,"STS13":78.4,"STS14":74.23,"STS15":81.41,"STS16":79.13,"STS17 (en-en)":85.4,"STS22 (en)":58.63,"STSBenchmark":81.34} -{"index":100,"Rank":173,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.03,"BIOSSES":86.62,"SICK-R":69.12,"STS12":66.97,"STS13":79.12,"STS14":68.51,"STS15":79.92,"STS16":78.66,"STS17 (en-en)":81.46,"STS22 (en)":65.84,"STSBenchmark":74.1} -{"index":81,"Rank":174,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.71,"BIOSSES":70.93,"SICK-R":74.57,"STS12":69.17,"STS13":77.23,"STS14":70.99,"STS15":79.74,"STS16":77.93,"STS17 (en-en)":87.33,"STS22 (en)":59.64,"STSBenchmark":79.54} -{"index":218,"Rank":175,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.33,"BIOSSES":72.31,"SICK-R":72.24,"STS12":66.05,"STS13":81.49,"STS14":73.61,"STS15":79.72,"STS16":78.12,"STS17 (en-en)":83.58,"STS22 (en)":59.65,"STSBenchmark":76.52} -{"index":134,"Rank":176,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":74.23,"BIOSSES":79.76,"SICK-R":70.04,"STS12":64.38,"STS13":76.41,"STS14":69.33,"STS15":80.18,"STS16":79.57,"STS17 (en-en)":81.18,"STS22 (en)":65.51,"STSBenchmark":75.98} -{"index":258,"Rank":177,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.05,"BIOSSES":69.36,"SICK-R":76.84,"STS12":72.49,"STS13":73.65,"STS14":71.88,"STS15":82.9,"STS16":79.83,"STS17 (en-en)":85.19,"STS22 (en)":48.44,"STSBenchmark":79.93} -{"index":44,"Rank":178,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.7,"BIOSSES":70.87,"SICK-R":70.47,"STS12":64.0,"STS13":78.2,"STS14":70.78,"STS15":80.03,"STS16":78.8,"STS17 (en-en)":83.88,"STS22 (en)":64.39,"STSBenchmark":75.57} -{"index":80,"Rank":179,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.41,"BIOSSES":75.21,"SICK-R":65.93,"STS12":66.53,"STS13":76.17,"STS14":69.05,"STS15":79.24,"STS16":76.07,"STS17 (en-en)":84.95,"STS22 (en)":65.66,"STSBenchmark":75.34} -{"index":71,"Rank":180,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.43,"BIOSSES":77.59,"SICK-R":74.68,"STS12":54.35,"STS13":74.24,"STS14":69.99,"STS15":75.74,"STS16":73.65,"STS17 (en-en)":84.81,"STS22 (en)":62.56,"STSBenchmark":76.72} -{"index":257,"Rank":181,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.04,"BIOSSES":63.38,"SICK-R":69.79,"STS12":67.06,"STS13":71.54,"STS14":70.59,"STS15":80.27,"STS16":75.76,"STS17 (en-en)":84.94,"STS22 (en)":60.0,"STSBenchmark":77.08} -{"index":67,"Rank":182,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":71.61,"BIOSSES":75.12,"SICK-R":69.34,"STS12":60.09,"STS13":72.52,"STS14":66.7,"STS15":77.69,"STS16":75.94,"STS17 (en-en)":81.67,"STS22 (en)":63.7,"STSBenchmark":73.36} -{"index":127,"Rank":183,"Model":"cai-lunaris-text-embeddings<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.81,"BIOSSES":73.97,"SICK-R":68.99,"STS12":58.5,"STS13":74.03,"STS14":66.18,"STS15":75.55,"STS16":73.71,"STS17 (en-en)":80.14,"STS22 (en)":65.65,"STSBenchmark":71.4} -{"index":227,"Rank":184,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":70.8,"BIOSSES":78.7,"SICK-R":69.99,"STS12":65.08,"STS13":67.98,"STS14":64.03,"STS15":76.59,"STS16":72.98,"STS17 (en-en)":79.45,"STS22 (en)":60.97,"STSBenchmark":72.25} -{"index":141,"Rank":185,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.23,"BIOSSES":79.11,"SICK-R":62.94,"STS12":65.46,"STS13":62.79,"STS14":57.54,"STS15":74.25,"STS16":75.73,"STS17 (en-en)":79.94,"STS22 (en)":47.12,"STSBenchmark":67.39} -{"index":256,"Rank":186,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.8,"BIOSSES":55.2,"SICK-R":63.19,"STS12":54.06,"STS13":66.29,"STS14":65.84,"STS15":78.0,"STS16":70.64,"STS17 (en-en)":82.23,"STS22 (en)":54.53,"STSBenchmark":68.04} -{"index":11,"Rank":187,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":63.27,"BIOSSES":62.01,"SICK-R":62.86,"STS12":62.6,"STS13":59.62,"STS14":57.03,"STS15":71.57,"STS16":70.75,"STS17 (en-en)":76.73,"STS22 (en)":39.76,"STSBenchmark":69.77} -{"index":233,"Rank":188,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":62.46,"BIOSSES":50.25,"SICK-R":55.49,"STS12":53.51,"STS13":70.8,"STS14":63.56,"STS15":74.08,"STS16":64.6,"STS17 (en-en)":76.91,"STS22 (en)":53.89,"STSBenchmark":61.55} -{"index":232,"Rank":189,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":61.85,"BIOSSES":44.93,"SICK-R":55.43,"STS12":54.64,"STS13":69.16,"STS14":60.81,"STS15":72.31,"STS16":65.34,"STS17 (en-en)":77.95,"STS22 (en)":56.35,"STSBenchmark":61.54} -{"index":231,"Rank":190,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.02,"BIOSSES":64.95,"SICK-R":56.39,"STS12":62.49,"STS13":58.7,"STS14":54.87,"STS15":62.54,"STS16":64.27,"STS17 (en-en)":69.63,"STS22 (en)":55.06,"STSBenchmark":61.26} -{"index":122,"Rank":191,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":54.36,"BIOSSES":54.7,"SICK-R":58.65,"STS12":30.87,"STS13":59.89,"STS14":47.73,"STS15":60.29,"STS16":63.73,"STS17 (en-en)":64.1,"STS22 (en)":56.37,"STSBenchmark":47.29} -{"index":255,"Rank":192,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.1,"BIOSSES":21.34,"SICK-R":48.55,"STS12":55.59,"STS13":18.36,"STS14":28.84,"STS15":29.19,"STS16":39.05,"STS17 (en-en)":61.22,"STS22 (en)":44.45,"STSBenchmark":44.39} -{"index":2,"Rank":193,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":84.17,"SICK-R":73.05,"STS12":66.59,"STS13":83.24,"STS14":73.71,"STS15":82.4,"STS16":"","STS17 (en-en)":80.9,"STS22 (en)":"","STSBenchmark":74.85} -{"index":38,"Rank":207,"Model":"STS-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":"","STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":0.98} -{"index":94,"Rank":234,"Model":"bge_m3e_stella<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":"","STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":61.83} -{"index":97,"Rank":235,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":67.85,"SICK-R":57.32,"STS12":42.8,"STS13":58.77,"STS14":53.36,"STS15":69.23,"STS16":58.81,"STS17 (en-en)":68.6,"STS22 (en)":"","STSBenchmark":52.67} -{"index":98,"Rank":236,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":79.98,"STS12":77.64,"STS13":81.23,"STS14":77.94,"STS15":86.87,"STS16":83.31,"STS17 (en-en)":87.35,"STS22 (en)":60.99,"STSBenchmark":""} -{"index":124,"Rank":240,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","BIOSSES":83.38,"SICK-R":79.71,"STS12":78.73,"STS13":79.6,"STS14":79.0,"STS15":87.81,"STS16":85.4,"STS17 (en-en)":87.13,"STS22 (en)":"","STSBenchmark":84.85} -{"index":241,"Rank":268,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","BIOSSES":74.18,"SICK-R":79.61,"STS12":76.02,"STS13":80.7,"STS14":78.85,"STS15":85.84,"STS16":81.05,"STS17 (en-en)":86.87,"STS22 (en)":"","STSBenchmark":84.42} -{"index":242,"Rank":269,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","BIOSSES":76.27,"SICK-R":79.62,"STS12":77.9,"STS13":85.11,"STS14":80.81,"STS15":87.48,"STS16":83.2,"STS17 (en-en)":86.99,"STS22 (en)":"","STSBenchmark":86.82} -{"index":247,"Rank":270,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":"","STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":47.06,"STSBenchmark":""} -{"index":262,"Rank":274,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":84.75,"SICK-R":80.74,"STS12":75.42,"STS13":85.48,"STS14":78.85,"STS15":85.23,"STS16":82.16,"STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":82.69} -{"index":263,"Rank":275,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":0.74,"SICK-R":0.81,"STS12":0.74,"STS13":0.83,"STS14":0.75,"STS15":0.82,"STS16":0.81,"STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":0.8} -{"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":78.12,"SICK-R":77.02,"STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":84.32} -{"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":77.46,"SICK-R":77.26,"STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":83.02} -{"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":68.95,"SICK-R":78.72,"STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":84.08} +{"Rank":1,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":77.53,"STS22 (fr)":74.3,"STSBenchmarkMultilingualSTS (fr)":84.69,"SICKFr":75.56,"SICKFr (fra-Latn)":75.56} +{"Rank":2,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":75.16,"STS22 (fr)":70.55,"STSBenchmarkMultilingualSTS (fr)":79.9,"SICKFr":75.1,"SICKFr (fra-Latn)":75.1} +{"Rank":3,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":73.23,"STS22 (fr)":77.95,"STSBenchmarkMultilingualSTS (fr)":75.1,"SICKFr":69.94,"SICKFr (fra-Latn)":69.94} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":66.72,"STS22 (fr)":77.0,"STSBenchmarkMultilingualSTS (fr)":64.93,"SICKFr":62.48,"SICKFr (fra-Latn)":62.48} +{"Rank":5,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.74,"STSBenchmarkMultilingualSTS (fr)":79.72,"SICKFr":76.21,"SICKFr (fra-Latn)":null} +{"Rank":6,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":70.51,"STSBenchmarkMultilingualSTS (fr)":76.43,"SICKFr":68.51,"SICKFr (fra-Latn)":null} +{"Rank":7,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":79.99,"STSBenchmarkMultilingualSTS (fr)":79.02,"SICKFr":73.56,"SICKFr (fra-Latn)":null} +{"Rank":8,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":83.75,"STSBenchmarkMultilingualSTS (fr)":83.02,"SICKFr":74.09,"SICKFr (fra-Latn)":null} +{"Rank":9,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":82.72,"SICKFr":74.9,"SICKFr (fra-Latn)":null} +{"Rank":10,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"STS22 (fr)":58.61,"STSBenchmarkMultilingualSTS (fr)":69.82,"SICKFr":64.95,"SICKFr (fra-Latn)":null} +{"Rank":11,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.8,"STSBenchmarkMultilingualSTS (fr)":76.48,"SICKFr":75.5,"SICKFr (fra-Latn)":null} +{"Rank":12,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":81.84,"SICKFr":79.23,"SICKFr (fra-Latn)":null} +{"Rank":13,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":14,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":15,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":16,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":40.31,"STSBenchmarkMultilingualSTS 
(fr)":52.25,"SICKFr":58.76,"SICKFr (fra-Latn)":null} +{"Rank":17,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":40.4,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.77,"SICKFr (fra-Latn)":null} +{"Rank":18,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":38.77,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76,"SICKFr (fra-Latn)":null} +{"Rank":19,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":76.91} +{"Rank":20,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"STS22 (fr)":78.77,"STSBenchmarkMultilingualSTS (fr)":79.23,"SICKFr":69.6,"SICKFr (fra-Latn)":null} +{"Rank":21,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":22,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":77.54,"STSBenchmarkMultilingualSTS (fr)":81.64,"SICKFr":74.18,"SICKFr (fra-Latn)":null} +{"Rank":23,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"STS22 (fr)":81.73,"STSBenchmarkMultilingualSTS (fr)":85.79,"SICKFr":77.7,"SICKFr (fra-Latn)":null} +{"Rank":24,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":25,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":26,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":65.37,"STSBenchmarkMultilingualSTS (fr)":37.14,"SICKFr":53.86,"SICKFr (fra-Latn)":null} +{"Rank":27,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":55.15,"STSBenchmarkMultilingualSTS (fr)":33.41,"SICKFr":41.9,"SICKFr (fra-Latn)":null} +{"Rank":28,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"STS22 (fr)":48.52,"STSBenchmarkMultilingualSTS (fr)":15.66,"SICKFr":34.6,"SICKFr (fra-Latn)":null} +{"Rank":29,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"STS22 (fr)":39.05,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.75,"SICKFr (fra-Latn)":null} +{"Rank":30,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"STS22 (fr)":56.47,"STSBenchmarkMultilingualSTS (fr)":54.97,"SICKFr":58.26,"SICKFr (fra-Latn)":null} +{"Rank":31,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"STS22 (fr)":69.82,"STSBenchmarkMultilingualSTS (fr)":61.87,"SICKFr":64.39,"SICKFr (fra-Latn)":null} 
+{"Rank":32,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":80.62,"SICKFr":76.23,"SICKFr (fra-Latn)":75.76} +{"Rank":33,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":82.53,"SICKFr":78.78,"SICKFr (fra-Latn)":78.81} +{"Rank":34,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":79.32,"SICKFr":75.62,"SICKFr (fra-Latn)":74.67} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":77.1,"STSBenchmarkMultilingualSTS (fr)":49.97,"SICKFr":59.94,"SICKFr (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":61.35,"STSBenchmarkMultilingualSTS (fr)":36.78,"SICKFr":54.54,"SICKFr (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"STS22 (fr)":69.51,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":63.16} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":67.05} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":53.92,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"STS22 (fr)":49.43,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"STS22 (fr)":76.41,"STSBenchmarkMultilingualSTS (fr)":77.49,"SICKFr":72.49,"SICKFr (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"STS22 (fr)":78.7,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"STS22 (fr)":79.43,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"STS22 (fr)":74.62,"STSBenchmarkMultilingualSTS (fr)":63.85,"SICKFr":62.11,"SICKFr (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":77.69,"STSBenchmarkMultilingualSTS (fr)":74.04,"SICKFr":71.74,"SICKFr (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"STS22 (fr)":75.01,"STSBenchmarkMultilingualSTS (fr)":77.59,"SICKFr":72.83,"SICKFr (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Average":null,"STS22 (fr)":77.49,"STSBenchmarkMultilingualSTS (fr)":79.42,"SICKFr":75.08,"SICKFr (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"STS22 (fr)":76.8,"STSBenchmarkMultilingualSTS (fr)":81.24,"SICKFr":77.07,"SICKFr (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":74.1,"STSBenchmarkMultilingualSTS (fr)":83.48,"SICKFr":77.25,"SICKFr (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":77.91,"STSBenchmarkMultilingualSTS (fr)":75.48,"SICKFr":71.37,"SICKFr (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":71.11,"STSBenchmarkMultilingualSTS (fr)":78.16,"SICKFr":74.39,"SICKFr (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"STS22 (fr)":56.72,"STSBenchmarkMultilingualSTS (fr)":46.23,"SICKFr":48.62,"SICKFr (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"STS22 (fr)":55.49,"STSBenchmarkMultilingualSTS (fr)":42.32,"SICKFr":50.01,"SICKFr (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":81.09,"STSBenchmarkMultilingualSTS (fr)":77.55,"SICKFr":76.28,"SICKFr (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} diff --git a/all_data_tasks/6/default.jsonl b/all_data_tasks/6/default.jsonl index 6620bee1a3c4a8b8fecd527f5b3575e73d5fb5f7..bf31ada5bfb2c7c32e3fa192789053a7d00962fd 100644 --- a/all_data_tasks/6/default.jsonl +++ b/all_data_tasks/6/default.jsonl @@ -1,184 +1,57 @@ -{"index":123,"Rank":1,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":33.6} -{"index":194,"Rank":2,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.71} -{"index":133,"Rank":3,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.71} -{"index":1,"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.63} -{"index":151,"Rank":5,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.52} 
-{"index":137,"Rank":6,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.52} -{"index":98,"Rank":7,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.47} -{"index":0,"Rank":8,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.36} -{"index":149,"Rank":9,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"SummEval":32.32} -{"index":53,"Rank":10,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.12} -{"index":168,"Rank":11,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.06} -{"index":165,"Rank":12,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.03} -{"index":111,"Rank":13,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.03} -{"index":108,"Rank":14,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.03} -{"index":197,"Rank":15,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.93} -{"index":261,"Rank":16,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.93} -{"index":148,"Rank":17,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":31.84} -{"index":103,"Rank":18,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.75} -{"index":253,"Rank":19,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.66} -{"index":139,"Rank":20,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.66} -{"index":150,"Rank":21,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.61} -{"index":114,"Rank":22,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.61} -{"index":170,"Rank":23,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.61} -{"index":22,"Rank":24,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"SummEval":31.61} -{"index":135,"Rank":25,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.6} -{"index":175,"Rank":26,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"SummEval":31.6} -{"index":242,"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":31.57} -{"index":174,"Rank":28,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.55} -{"index":193,"Rank":29,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.55} 
-{"index":138,"Rank":30,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.49} -{"index":15,"Rank":31,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"SummEval":31.46} -{"index":83,"Rank":32,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.46} -{"index":63,"Rank":33,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":31.45} -{"index":156,"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":31.4} -{"index":243,"Rank":35,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.39} -{"index":157,"Rank":36,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":31.39} -{"index":101,"Rank":37,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.38} -{"index":61,"Rank":38,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":31.38} -{"index":205,"Rank":39,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.35} -{"index":126,"Rank":40,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.35} -{"index":17,"Rank":41,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"SummEval":31.35} -{"index":33,"Rank":42,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.29} -{"index":207,"Rank":43,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.28} -{"index":69,"Rank":44,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.27} -{"index":173,"Rank":45,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.25} -{"index":44,"Rank":46,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.23} -{"index":67,"Rank":47,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":31.23} -{"index":116,"Rank":48,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.22} -{"index":215,"Rank":49,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"SummEval":31.2} -{"index":252,"Rank":50,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"index":217,"Rank":51,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.17} -{"index":16,"Rank":52,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"index":204,"Rank":53,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","SummEval":31.17} -{"index":18,"Rank":54,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"index":96,"Rank":55,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":31.16} -{"index":158,"Rank":56,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.16} -{"index":105,"Rank":57,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.16} -{"index":218,"Rank":58,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.15} -{"index":118,"Rank":59,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.14} -{"index":284,"Rank":60,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.12} -{"index":166,"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.1} -{"index":182,"Rank":62,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"index":180,"Rank":63,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"index":120,"Rank":64,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"index":181,"Rank":65,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"index":20,"Rank":66,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"SummEval":31.07} -{"index":179,"Rank":67,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"index":227,"Rank":68,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"SummEval":31.05} -{"index":134,"Rank":69,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":31.05} -{"index":285,"Rank":70,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.03} -{"index":82,"Rank":71,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.03} -{"index":152,"Rank":72,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.01} -{"index":9,"Rank":73,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"SummEval":31.01} -{"index":183,"Rank":74,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.01} -{"index":36,"Rank":75,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.99} -{"index":58,"Rank":76,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":30.98} -{"index":68,"Rank":77,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","SummEval":30.97} -{"index":8,"Rank":78,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.97} -{"index":169,"Rank":79,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.97} -{"index":154,"Rank":80,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":30.97} -{"index":117,"Rank":81,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.96} -{"index":62,"Rank":82,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":30.94} -{"index":19,"Rank":83,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.91} -{"index":115,"Rank":84,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.87} -{"index":51,"Rank":85,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.87} -{"index":121,"Rank":86,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} -{"index":99,"Rank":87,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} -{"index":6,"Rank":88,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} -{"index":219,"Rank":89,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.83} -{"index":186,"Rank":90,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.82} -{"index":229,"Rank":91,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":30.81} -{"index":281,"Rank":92,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.8} -{"index":259,"Rank":93,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.79} -{"index":21,"Rank":94,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.77} -{"index":178,"Rank":95,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.73} -{"index":208,"Rank":96,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.72} -{"index":72,"Rank":97,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.71} -{"index":171,"Rank":98,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.71} -{"index":95,"Rank":99,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.71} -{"index":241,"Rank":100,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":30.67} -{"index":238,"Rank":101,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.64} 
-{"index":97,"Rank":102,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.62} -{"index":167,"Rank":103,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.62} -{"index":125,"Rank":104,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.6} -{"index":119,"Rank":105,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.6} -{"index":262,"Rank":106,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.57} -{"index":177,"Rank":107,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.56} -{"index":141,"Rank":108,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.56} -{"index":136,"Rank":109,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.56} -{"index":77,"Rank":110,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.52} -{"index":233,"Rank":111,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"SummEval":30.49} -{"index":211,"Rank":112,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.47} -{"index":79,"Rank":113,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.43} -{"index":254,"Rank":114,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.42} -{"index":35,"Rank":115,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.41} -{"index":213,"Rank":116,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.4} -{"index":161,"Rank":117,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.39} -{"index":84,"Rank":118,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.38} -{"index":214,"Rank":119,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.36} -{"index":100,"Rank":120,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.31} -{"index":70,"Rank":121,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.3} -{"index":153,"Rank":122,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.28} -{"index":140,"Rank":123,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.28} -{"index":81,"Rank":124,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.26} -{"index":42,"Rank":125,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"SummEval":30.26} 
-{"index":184,"Rank":126,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.25} -{"index":237,"Rank":127,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":30.21} -{"index":93,"Rank":128,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.19} -{"index":65,"Rank":129,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":30.19} -{"index":155,"Rank":130,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":30.19} -{"index":34,"Rank":131,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.18} -{"index":24,"Rank":132,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"SummEval":30.12} -{"index":159,"Rank":133,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":30.11} -{"index":248,"Rank":134,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.1} -{"index":104,"Rank":135,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.1} -{"index":206,"Rank":136,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"SummEval":30.08} -{"index":27,"Rank":137,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"index":28,"Rank":138,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"index":26,"Rank":139,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"index":246,"Rank":140,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.08} -{"index":129,"Rank":141,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"SummEval":30.08} -{"index":29,"Rank":142,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"index":210,"Rank":143,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.05} -{"index":66,"Rank":144,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":30.01} -{"index":162,"Rank":145,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":29.98} -{"index":64,"Rank":146,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":29.96} -{"index":282,"Rank":147,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92} -{"index":283,"Rank":148,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92} -{"index":71,"Rank":149,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.91} 
-{"index":245,"Rank":150,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":29.91} -{"index":106,"Rank":151,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.87} -{"index":176,"Rank":152,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.87} -{"index":147,"Rank":153,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.85} -{"index":172,"Rank":154,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.85} -{"index":113,"Rank":155,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.83} -{"index":43,"Rank":156,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.82} -{"index":122,"Rank":157,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.82} -{"index":267,"Rank":158,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.78} -{"index":260,"Rank":159,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.71} -{"index":80,"Rank":160,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.71} -{"index":160,"Rank":161,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEval":29.69} -{"index":235,"Rank":162,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.67} -{"index":244,"Rank":163,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.64} -{"index":209,"Rank":164,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":29.59} -{"index":256,"Rank":165,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.51} -{"index":236,"Rank":166,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.5} -{"index":239,"Rank":167,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.5} -{"index":107,"Rank":168,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.47} -{"index":257,"Rank":169,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.42} -{"index":78,"Rank":170,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.33} -{"index":185,"Rank":171,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.02} -{"index":127,"Rank":172,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.02} -{"index":258,"Rank":173,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.01} -{"index":232,"Rank":174,"Model":"glove.6B.300d<\/a>","Model Size 
(Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"SummEval":28.87} -{"index":60,"Rank":175,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":28.49} -{"index":212,"Rank":176,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":28.41} -{"index":228,"Rank":177,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":27.9} -{"index":231,"Rank":178,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.66} -{"index":112,"Rank":179,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":27.54} -{"index":230,"Rank":180,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.49} -{"index":277,"Rank":181,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":26.94} -{"index":11,"Rank":182,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"SummEval":26.8} -{"index":23,"Rank":183,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":0.31} -{"index":263,"Rank":184,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":0.29} +{"Rank":1,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":31.86,"SummEvalFr":30.76,"SummEvalFr (fra-Latn)":32.96} +{"Rank":2,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.5,"SummEvalFr":31.85,"SummEvalFr (fra-Latn)":31.14} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.92,"SummEvalFr":30.92,"SummEvalFr (fra-Latn)":30.92} +{"Rank":4,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":30.16,"SummEvalFr":30.16,"SummEvalFr (fra-Latn)":30.16} +{"Rank":5,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":29.47,"SummEvalFr":29.47,"SummEvalFr (fra-Latn)":29.47} +{"Rank":6,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":29.2,"SummEvalFr":29.2,"SummEvalFr (fra-Latn)":29.2} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.28,"SummEvalFr":28.28,"SummEvalFr (fra-Latn)":28.29} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.47,"SummEvalFr (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.88,"SummEvalFr (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.34,"SummEvalFr (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.34,"SummEvalFr (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.96,"SummEvalFr (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"SummEvalFr":31.56,"SummEvalFr (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.4,"SummEvalFr (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.26,"SummEvalFr (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.06,"SummEvalFr (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.13,"SummEvalFr (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":28.84,"SummEvalFr (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":29.97} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"SummEvalFr":29.04,"SummEvalFr (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":28.77,"SummEvalFr (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"SummEvalFr":30.88,"SummEvalFr (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":31.26,"SummEvalFr (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.43,"SummEvalFr (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"SummEvalFr":29.25,"SummEvalFr 
(fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":28.81,"SummEvalFr (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":30.72,"SummEvalFr (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"SummEvalFr":32.22,"SummEvalFr (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.48,"SummEvalFr (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":23.63,"SummEvalFr (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":26.63} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":28.11} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"SummEvalFr":28.12,"SummEvalFr (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"SummEvalFr":27.59,"SummEvalFr (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":30.01,"SummEvalFr (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":30.23,"SummEvalFr (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SummEvalFr":31.59,"SummEvalFr (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"SummEvalFr":30.39,"SummEvalFr (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"SummEvalFr":29.33,"SummEvalFr (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.21,"SummEvalFr (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.56,"SummEvalFr (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"SummEvalFr":29.14,"SummEvalFr (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"SummEvalFr":28.89,"SummEvalFr (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.5,"SummEvalFr (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} diff --git a/all_data_tasks/7/default.jsonl b/all_data_tasks/7/default.jsonl index 35fb064cf43602a913c09bb4e700e76edd5e5743..2b640bd39f6444a120c23534109e5574bf99a864 100644 --- a/all_data_tasks/7/default.jsonl +++ b/all_data_tasks/7/default.jsonl @@ -1,28 +1,9 @@ -{"index":12,"Rank":1,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.29,"BUCC (de-en)":99.61,"BUCC (fr-en)":99.15,"BUCC (ru-en)":97.87,"BUCC (zh-en)":99.39,"Tatoeba (afr-eng)":95.4,"Tatoeba (amh-eng)":87.14,"Tatoeba (ang-eng)":72.81,"Tatoeba (ara-eng)":91.1,"Tatoeba (arq-eng)":63.07,"Tatoeba (arz-eng)":81.86,"Tatoeba (ast-eng)":89.76,"Tatoeba (awa-eng)":91.99,"Tatoeba (aze-eng)":93.79,"Tatoeba (bel-eng)":95.55,"Tatoeba (ben-eng)":89.13,"Tatoeba (ber-eng)":55.58,"Tatoeba (bos-eng)":95.86,"Tatoeba (bre-eng)":32.94,"Tatoeba (bul-eng)":94.38,"Tatoeba (cat-eng)":94.93,"Tatoeba (cbk-eng)":84.1,"Tatoeba (ceb-eng)":70.78,"Tatoeba (ces-eng)":96.57,"Tatoeba (cha-eng)":51.46,"Tatoeba (cmn-eng)":96.37,"Tatoeba (cor-eng)":11.82,"Tatoeba (csb-eng)":66.59,"Tatoeba (cym-eng)":89.97,"Tatoeba (dan-eng)":95.3,"Tatoeba (deu-eng)":99.33,"Tatoeba (dsb-eng)":72.0,"Tatoeba (dtp-eng)":14.51,"Tatoeba (ell-eng)":95.12,"Tatoeba (epo-eng)":98.48,"Tatoeba (est-eng)":88.03,"Tatoeba (eus-eng)":84.72,"Tatoeba (fao-eng)":86.77,"Tatoeba (fin-eng)":96.92,"Tatoeba (fra-eng)":95.02,"Tatoeba (fry-eng)":82.47,"Tatoeba (gla-eng)":78.33,"Tatoeba (gle-eng)":84.99,"Tatoeba (glg-eng)":96.22,"Tatoeba (gsw-eng)":61.9,"Tatoeba (heb-eng)":91.46,"Tatoeba (hin-eng)":97.6,"Tatoeba (hrv-eng)":96.75,"Tatoeba (hsb-eng)":79.45,"Tatoeba (hun-eng)":94.33,"Tatoeba (hye-eng)":93.52,"Tatoeba (ido-eng)":91.92,"Tatoeba (ile-eng)":89.78,"Tatoeba (ina-eng)":96.27,"Tatoeba (ind-eng)":94.42,"Tatoeba (isl-eng)":95.0,"Tatoeba (ita-eng)":94.67,"Tatoeba (jav-eng)":85.28,"Tatoeba (jpn-eng)":96.12,"Tatoeba (kab-eng)":57.85,"Tatoeba (kat-eng)":92.47,"Tatoeba (kaz-eng)":87.72,"Tatoeba (khm-eng)":73.27,"Tatoeba (kor-eng)":91.97,"Tatoeba (kur-eng)":77.33,"Tatoeba (kzj-eng)":14.98,"Tatoeba (lat-eng)":74.51,"Tatoeba (lfn-eng)":80.68,"Tatoeba (lit-eng)":92.01,"Tatoeba (lvs-eng)":92.81,"Tatoeba (mal-eng)":98.93,"Tatoeba (mar-eng)":92.58,"Tatoeba (max-eng)":73.44,"Tatoeba (mhr-eng)":16.92,"Tatoeba (mkd-eng)":92.49,"Tatoeba (mon-eng)":95.49,"Tatoeba 
(nds-eng)":86.32,"Tatoeba (nld-eng)":97.2,"Tatoeba (nno-eng)":94.26,"Tatoeba (nob-eng)":98.2,"Tatoeba (nov-eng)":83.0,"Tatoeba (oci-eng)":71.91,"Tatoeba (orv-eng)":58.29,"Tatoeba (pam-eng)":19.63,"Tatoeba (pes-eng)":94.57,"Tatoeba (pms-eng)":77.98,"Tatoeba (pol-eng)":97.27,"Tatoeba (por-eng)":94.91,"Tatoeba (ron-eng)":97.18,"Tatoeba (rus-eng)":93.57,"Tatoeba (slk-eng)":95.13,"Tatoeba (slv-eng)":91.91,"Tatoeba (spa-eng)":98.6,"Tatoeba (sqi-eng)":96.75,"Tatoeba (srp-eng)":94.53,"Tatoeba (swe-eng)":95.72,"Tatoeba (swg-eng)":77.68,"Tatoeba (swh-eng)":80.91,"Tatoeba (tam-eng)":90.65,"Tatoeba (tat-eng)":84.61,"Tatoeba (tel-eng)":96.08,"Tatoeba (tgl-eng)":95.85,"Tatoeba (tha-eng)":96.72,"Tatoeba (tuk-eng)":49.64,"Tatoeba (tur-eng)":98.27,"Tatoeba (tzl-eng)":61.09,"Tatoeba (uig-eng)":86.82,"Tatoeba (ukr-eng)":94.78,"Tatoeba (urd-eng)":93.41,"Tatoeba (uzb-eng)":80.55,"Tatoeba (vie-eng)":97.1,"Tatoeba (war-eng)":74.4,"Tatoeba (wuu-eng)":92.34,"Tatoeba (xho-eng)":88.26,"Tatoeba (yid-eng)":90.38,"Tatoeba (yue-eng)":93.47,"Tatoeba (zsm-eng)":96.48} -{"index":8,"Rank":2,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.67,"BUCC (de-en)":98.82,"BUCC (fr-en)":98.09,"BUCC (ru-en)":97.37,"BUCC (zh-en)":98.72,"Tatoeba (afr-eng)":97.47,"Tatoeba (amh-eng)":91.67,"Tatoeba (ang-eng)":46.34,"Tatoeba (ara-eng)":93.83,"Tatoeba (arq-eng)":75.41,"Tatoeba (arz-eng)":86.76,"Tatoeba (ast-eng)":90.81,"Tatoeba (awa-eng)":87.86,"Tatoeba (aze-eng)":89.12,"Tatoeba (bel-eng)":95.68,"Tatoeba (ben-eng)":89.02,"Tatoeba (ber-eng)":29.29,"Tatoeba (bos-eng)":95.9,"Tatoeba (bre-eng)":8.48,"Tatoeba (bul-eng)":94.52,"Tatoeba (cat-eng)":96.65,"Tatoeba (cbk-eng)":80.47,"Tatoeba (ceb-eng)":74.67,"Tatoeba (ces-eng)":96.88,"Tatoeba (cha-eng)":25.11,"Tatoeba (cmn-eng)":93.8,"Tatoeba (cor-eng)":9.98,"Tatoeba (csb-eng)":77.98,"Tatoeba (cym-eng)":91.3,"Tatoeba (dan-eng)":97.09,"Tatoeba (deu-eng)":99.13,"Tatoeba (dsb-eng)":64.48,"Tatoeba (dtp-eng)":7.95,"Tatoeba (ell-eng)":95.47,"Tatoeba (epo-eng)":98.47,"Tatoeba (est-eng)":97.17,"Tatoeba (eus-eng)":95.38,"Tatoeba (fao-eng)":93.6,"Tatoeba (fin-eng)":97.83,"Tatoeba (fra-eng)":95.37,"Tatoeba (fry-eng)":59.45,"Tatoeba (gla-eng)":80.89,"Tatoeba (gle-eng)":91.63,"Tatoeba (glg-eng)":97.28,"Tatoeba (gsw-eng)":54.83,"Tatoeba (heb-eng)":92.34,"Tatoeba (hin-eng)":97.07,"Tatoeba (hrv-eng)":97.65,"Tatoeba (hsb-eng)":77.87,"Tatoeba (hun-eng)":96.93,"Tatoeba (hye-eng)":95.15,"Tatoeba (ido-eng)":93.67,"Tatoeba (ile-eng)":81.0,"Tatoeba (ina-eng)":88.66,"Tatoeba (ind-eng)":94.5,"Tatoeba (isl-eng)":96.47,"Tatoeba (ita-eng)":95.95,"Tatoeba (jav-eng)":85.76,"Tatoeba (jpn-eng)":94.67,"Tatoeba (kab-eng)":83.66,"Tatoeba (kat-eng)":93.69,"Tatoeba (kaz-eng)":88.12,"Tatoeba (khm-eng)":70.2,"Tatoeba (kor-eng)":89.99,"Tatoeba (kur-eng)":34.22,"Tatoeba (kzj-eng)":8.62,"Tatoeba (lat-eng)":36.64,"Tatoeba (lfn-eng)":65.54,"Tatoeba (lit-eng)":97.43,"Tatoeba (lvs-eng)":95.45,"Tatoeba (mal-eng)":97.67,"Tatoeba (mar-eng)":93.35,"Tatoeba (max-eng)":72.73,"Tatoeba (mhr-eng)":11.87,"Tatoeba (mkd-eng)":95.98,"Tatoeba (mon-eng)":91.59,"Tatoeba (nds-eng)":54.18,"Tatoeba (nld-eng)":97.2,"Tatoeba (nno-eng)":97.31,"Tatoeba (nob-eng)":99.07,"Tatoeba (nov-eng)":71.25,"Tatoeba (oci-eng)":84.57,"Tatoeba (orv-eng)":47.07,"Tatoeba (pam-eng)":10.3,"Tatoeba (pes-eng)":94.37,"Tatoeba (pms-eng)":66.63,"Tatoeba (pol-eng)":97.7,"Tatoeba (por-eng)":95.49,"Tatoeba (ron-eng)":97.6,"Tatoeba (rus-eng)":94.35,"Tatoeba (slk-eng)":97.63,"Tatoeba (slv-eng)":95.99,"Tatoeba (spa-eng)":98.53,"Tatoeba 
(sqi-eng)":98.55,"Tatoeba (srp-eng)":94.87,"Tatoeba (swe-eng)":97.03,"Tatoeba (swg-eng)":67.69,"Tatoeba (swh-eng)":91.98,"Tatoeba (tam-eng)":88.08,"Tatoeba (tat-eng)":87.05,"Tatoeba (tel-eng)":91.6,"Tatoeba (tgl-eng)":96.95,"Tatoeba (tha-eng)":96.05,"Tatoeba (tuk-eng)":91.82,"Tatoeba (tur-eng)":98.27,"Tatoeba (tzl-eng)":39.48,"Tatoeba (uig-eng)":90.07,"Tatoeba (ukr-eng)":94.3,"Tatoeba (urd-eng)":93.17,"Tatoeba (uzb-eng)":85.26,"Tatoeba (vie-eng)":97.17,"Tatoeba (war-eng)":82.83,"Tatoeba (wuu-eng)":81.79,"Tatoeba (xho-eng)":94.84,"Tatoeba (yid-eng)":94.3,"Tatoeba (yue-eng)":88.44,"Tatoeba (zsm-eng)":96.07} -{"index":19,"Rank":3,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":81.75,"BUCC (de-en)":99.35,"BUCC (fr-en)":98.72,"BUCC (ru-en)":97.78,"BUCC (zh-en)":99.16,"Tatoeba (afr-eng)":96.18,"Tatoeba (amh-eng)":91.47,"Tatoeba (ang-eng)":59.28,"Tatoeba (ara-eng)":88.8,"Tatoeba (arq-eng)":42.69,"Tatoeba (arz-eng)":76.0,"Tatoeba (ast-eng)":90.68,"Tatoeba (awa-eng)":71.7,"Tatoeba (aze-eng)":94.93,"Tatoeba (bel-eng)":95.0,"Tatoeba (ben-eng)":88.55,"Tatoeba (ber-eng)":8.4,"Tatoeba (bos-eng)":94.92,"Tatoeba (bre-eng)":15.07,"Tatoeba (bul-eng)":94.58,"Tatoeba (cat-eng)":95.38,"Tatoeba (cbk-eng)":79.44,"Tatoeba (ceb-eng)":64.42,"Tatoeba (ces-eng)":96.68,"Tatoeba (cha-eng)":31.77,"Tatoeba (cmn-eng)":95.1,"Tatoeba (cor-eng)":10.11,"Tatoeba (csb-eng)":52.57,"Tatoeba (cym-eng)":92.0,"Tatoeba (dan-eng)":95.71,"Tatoeba (deu-eng)":99.2,"Tatoeba (dsb-eng)":64.81,"Tatoeba (dtp-eng)":10.85,"Tatoeba (ell-eng)":95.35,"Tatoeba (epo-eng)":98.2,"Tatoeba (est-eng)":96.55,"Tatoeba (eus-eng)":95.01,"Tatoeba (fao-eng)":87.4,"Tatoeba (fin-eng)":96.37,"Tatoeba (fra-eng)":94.86,"Tatoeba (fry-eng)":89.31,"Tatoeba (gla-eng)":85.66,"Tatoeba (gle-eng)":93.8,"Tatoeba (glg-eng)":96.82,"Tatoeba (gsw-eng)":46.5,"Tatoeba (heb-eng)":91.53,"Tatoeba (hin-eng)":96.87,"Tatoeba (hrv-eng)":96.95,"Tatoeba (hsb-eng)":67.11,"Tatoeba (hun-eng)":96.55,"Tatoeba (hye-eng)":94.09,"Tatoeba (ido-eng)":89.42,"Tatoeba (ile-eng)":85.58,"Tatoeba (ina-eng)":95.37,"Tatoeba (ind-eng)":93.66,"Tatoeba (isl-eng)":94.75,"Tatoeba (ita-eng)":92.72,"Tatoeba (jav-eng)":79.77,"Tatoeba (jpn-eng)":95.38,"Tatoeba (kab-eng)":4.31,"Tatoeba (kat-eng)":95.02,"Tatoeba (kaz-eng)":87.49,"Tatoeba (khm-eng)":78.37,"Tatoeba (kor-eng)":90.95,"Tatoeba (kur-eng)":83.59,"Tatoeba (kzj-eng)":11.33,"Tatoeba (lat-eng)":80.07,"Tatoeba (lfn-eng)":67.54,"Tatoeba (lit-eng)":96.47,"Tatoeba (lvs-eng)":95.88,"Tatoeba (mal-eng)":98.45,"Tatoeba (mar-eng)":92.65,"Tatoeba (max-eng)":63.26,"Tatoeba (mhr-eng)":15.74,"Tatoeba (mkd-eng)":93.6,"Tatoeba (mon-eng)":95.91,"Tatoeba (nds-eng)":79.42,"Tatoeba (nld-eng)":96.07,"Tatoeba (nno-eng)":94.48,"Tatoeba (nob-eng)":98.4,"Tatoeba (nov-eng)":74.38,"Tatoeba (oci-eng)":65.81,"Tatoeba (orv-eng)":38.93,"Tatoeba (pam-eng)":10.73,"Tatoeba (pes-eng)":94.7,"Tatoeba (pms-eng)":64.57,"Tatoeba (pol-eng)":97.22,"Tatoeba (por-eng)":94.14,"Tatoeba (ron-eng)":96.92,"Tatoeba (rus-eng)":93.75,"Tatoeba (slk-eng)":96.5,"Tatoeba (slv-eng)":96.03,"Tatoeba (spa-eng)":98.4,"Tatoeba (sqi-eng)":96.76,"Tatoeba (srp-eng)":94.43,"Tatoeba (swe-eng)":95.63,"Tatoeba (swg-eng)":59.36,"Tatoeba (swh-eng)":84.5,"Tatoeba (tam-eng)":89.0,"Tatoeba (tat-eng)":85.92,"Tatoeba (tel-eng)":97.86,"Tatoeba (tgl-eng)":96.02,"Tatoeba (tha-eng)":96.14,"Tatoeba (tuk-eng)":75.27,"Tatoeba (tur-eng)":98.0,"Tatoeba (tzl-eng)":58.88,"Tatoeba (uig-eng)":92.4,"Tatoeba (ukr-eng)":93.97,"Tatoeba (urd-eng)":93.22,"Tatoeba (uzb-eng)":84.23,"Tatoeba 
(vie-eng)":97.2,"Tatoeba (war-eng)":60.29,"Tatoeba (wuu-eng)":90.18,"Tatoeba (xho-eng)":91.55,"Tatoeba (yid-eng)":88.79,"Tatoeba (yue-eng)":89.58,"Tatoeba (zsm-eng)":95.62} -{"index":11,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":76.52,"BUCC (de-en)":99.38,"BUCC (fr-en)":98.11,"BUCC (ru-en)":97.52,"BUCC (zh-en)":99.2,"Tatoeba (afr-eng)":90.26,"Tatoeba (amh-eng)":80.69,"Tatoeba (ang-eng)":40.18,"Tatoeba (ara-eng)":85.47,"Tatoeba (arq-eng)":41.56,"Tatoeba (arz-eng)":74.73,"Tatoeba (ast-eng)":81.76,"Tatoeba (awa-eng)":72.27,"Tatoeba (aze-eng)":87.61,"Tatoeba (bel-eng)":91.2,"Tatoeba (ben-eng)":83.02,"Tatoeba (ber-eng)":38.89,"Tatoeba (bos-eng)":92.86,"Tatoeba (bre-eng)":11.1,"Tatoeba (bul-eng)":92.93,"Tatoeba (cat-eng)":91.03,"Tatoeba (cbk-eng)":69.15,"Tatoeba (ceb-eng)":55.31,"Tatoeba (ces-eng)":94.89,"Tatoeba (cha-eng)":27.21,"Tatoeba (cmn-eng)":95.28,"Tatoeba (cor-eng)":6.28,"Tatoeba (csb-eng)":36.98,"Tatoeba (cym-eng)":76.21,"Tatoeba (dan-eng)":95.08,"Tatoeba (deu-eng)":99.07,"Tatoeba (dsb-eng)":48.44,"Tatoeba (dtp-eng)":7.0,"Tatoeba (ell-eng)":93.88,"Tatoeba (epo-eng)":96.01,"Tatoeba (est-eng)":85.03,"Tatoeba (eus-eng)":77.82,"Tatoeba (fao-eng)":72.62,"Tatoeba (fin-eng)":95.44,"Tatoeba (fra-eng)":93.42,"Tatoeba (fry-eng)":63.43,"Tatoeba (gla-eng)":59.01,"Tatoeba (gle-eng)":71.48,"Tatoeba (glg-eng)":93.34,"Tatoeba (gsw-eng)":51.65,"Tatoeba (heb-eng)":86.52,"Tatoeba (hin-eng)":94.48,"Tatoeba (hrv-eng)":96.15,"Tatoeba (hsb-eng)":58.7,"Tatoeba (hun-eng)":94.01,"Tatoeba (hye-eng)":90.92,"Tatoeba (ido-eng)":83.63,"Tatoeba (ile-eng)":79.15,"Tatoeba (ina-eng)":93.61,"Tatoeba (ind-eng)":92.9,"Tatoeba (isl-eng)":91.96,"Tatoeba (ita-eng)":93.29,"Tatoeba (jav-eng)":75.46,"Tatoeba (jpn-eng)":95.28,"Tatoeba (kab-eng)":36.54,"Tatoeba (kat-eng)":84.09,"Tatoeba (kaz-eng)":79.44,"Tatoeba (khm-eng)":60.02,"Tatoeba (kor-eng)":90.65,"Tatoeba (kur-eng)":66.83,"Tatoeba (kzj-eng)":7.91,"Tatoeba (lat-eng)":53.3,"Tatoeba (lfn-eng)":63.02,"Tatoeba (lit-eng)":88.48,"Tatoeba (lvs-eng)":89.83,"Tatoeba (mal-eng)":97.7,"Tatoeba (mar-eng)":88.58,"Tatoeba (max-eng)":63.42,"Tatoeba (mhr-eng)":6.79,"Tatoeba (mkd-eng)":85.5,"Tatoeba (mon-eng)":87.53,"Tatoeba (nds-eng)":69.52,"Tatoeba (nld-eng)":96.63,"Tatoeba (nno-eng)":91.4,"Tatoeba (nob-eng)":97.2,"Tatoeba (nov-eng)":71.56,"Tatoeba (oci-eng)":54.91,"Tatoeba (orv-eng)":39.8,"Tatoeba (pam-eng)":9.28,"Tatoeba (pes-eng)":92.14,"Tatoeba (pms-eng)":59.87,"Tatoeba (pol-eng)":96.6,"Tatoeba (por-eng)":93.63,"Tatoeba (ron-eng)":94.87,"Tatoeba (rus-eng)":92.32,"Tatoeba (slk-eng)":93.13,"Tatoeba (slv-eng)":89.65,"Tatoeba (spa-eng)":97.1,"Tatoeba (sqi-eng)":94.7,"Tatoeba (srp-eng)":93.1,"Tatoeba (swe-eng)":95.3,"Tatoeba (swg-eng)":56.83,"Tatoeba (swh-eng)":71.61,"Tatoeba (tam-eng)":88.23,"Tatoeba (tat-eng)":73.65,"Tatoeba (tel-eng)":91.34,"Tatoeba (tgl-eng)":92.0,"Tatoeba (tha-eng)":95.38,"Tatoeba (tuk-eng)":33.15,"Tatoeba (tur-eng)":96.27,"Tatoeba (tzl-eng)":53.16,"Tatoeba (uig-eng)":72.08,"Tatoeba (ukr-eng)":93.32,"Tatoeba (urd-eng)":89.21,"Tatoeba (uzb-eng)":72.35,"Tatoeba (vie-eng)":97.0,"Tatoeba (war-eng)":62.26,"Tatoeba (wuu-eng)":86.23,"Tatoeba (xho-eng)":80.87,"Tatoeba (yid-eng)":76.33,"Tatoeba (yue-eng)":88.71,"Tatoeba (zsm-eng)":94.53} -{"index":9,"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":71.07,"BUCC (de-en)":99.5,"BUCC (fr-en)":99.09,"BUCC (ru-en)":97.84,"BUCC (zh-en)":99.23,"Tatoeba (afr-eng)":87.19,"Tatoeba 
(amh-eng)":22.05,"Tatoeba (ang-eng)":81.59,"Tatoeba (ara-eng)":88.76,"Tatoeba (arq-eng)":48.55,"Tatoeba (arz-eng)":70.73,"Tatoeba (ast-eng)":80.94,"Tatoeba (awa-eng)":67.45,"Tatoeba (aze-eng)":76.23,"Tatoeba (bel-eng)":88.09,"Tatoeba (ben-eng)":81.82,"Tatoeba (ber-eng)":7.62,"Tatoeba (bos-eng)":91.54,"Tatoeba (bre-eng)":14.2,"Tatoeba (bul-eng)":93.98,"Tatoeba (cat-eng)":91.89,"Tatoeba (cbk-eng)":81.32,"Tatoeba (ceb-eng)":43.15,"Tatoeba (ces-eng)":94.93,"Tatoeba (cha-eng)":44.8,"Tatoeba (cmn-eng)":95.9,"Tatoeba (cor-eng)":9.19,"Tatoeba (csb-eng)":62.92,"Tatoeba (cym-eng)":72.32,"Tatoeba (dan-eng)":93.92,"Tatoeba (deu-eng)":99.47,"Tatoeba (dsb-eng)":65.43,"Tatoeba (dtp-eng)":11.74,"Tatoeba (ell-eng)":91.42,"Tatoeba (epo-eng)":87.79,"Tatoeba (est-eng)":62.89,"Tatoeba (eus-eng)":40.51,"Tatoeba (fao-eng)":70.33,"Tatoeba (fin-eng)":90.69,"Tatoeba (fra-eng)":95.77,"Tatoeba (fry-eng)":70.98,"Tatoeba (gla-eng)":63.53,"Tatoeba (gle-eng)":73.81,"Tatoeba (glg-eng)":90.44,"Tatoeba (gsw-eng)":52.21,"Tatoeba (heb-eng)":82.82,"Tatoeba (hin-eng)":95.28,"Tatoeba (hrv-eng)":93.97,"Tatoeba (hsb-eng)":74.24,"Tatoeba (hun-eng)":89.82,"Tatoeba (hye-eng)":63.99,"Tatoeba (ido-eng)":75.11,"Tatoeba (ile-eng)":83.97,"Tatoeba (ina-eng)":95.52,"Tatoeba (ind-eng)":93.48,"Tatoeba (isl-eng)":87.57,"Tatoeba (ita-eng)":91.94,"Tatoeba (jav-eng)":38.49,"Tatoeba (jpn-eng)":94.0,"Tatoeba (kab-eng)":3.81,"Tatoeba (kat-eng)":59.67,"Tatoeba (kaz-eng)":46.89,"Tatoeba (khm-eng)":37.31,"Tatoeba (kor-eng)":91.29,"Tatoeba (kur-eng)":29.93,"Tatoeba (kzj-eng)":12.19,"Tatoeba (lat-eng)":87.94,"Tatoeba (lfn-eng)":73.89,"Tatoeba (lit-eng)":69.2,"Tatoeba (lvs-eng)":66.01,"Tatoeba (mal-eng)":55.86,"Tatoeba (mar-eng)":68.99,"Tatoeba (max-eng)":64.8,"Tatoeba (mhr-eng)":16.24,"Tatoeba (mkd-eng)":86.9,"Tatoeba (mon-eng)":37.79,"Tatoeba (nds-eng)":77.42,"Tatoeba (nld-eng)":96.4,"Tatoeba (nno-eng)":87.24,"Tatoeba (nob-eng)":96.86,"Tatoeba (nov-eng)":72.4,"Tatoeba (oci-eng)":62.2,"Tatoeba (orv-eng)":59.65,"Tatoeba (pam-eng)":14.02,"Tatoeba (pes-eng)":89.67,"Tatoeba (pms-eng)":62.3,"Tatoeba (pol-eng)":96.47,"Tatoeba (por-eng)":94.44,"Tatoeba (ron-eng)":93.0,"Tatoeba (rus-eng)":93.75,"Tatoeba (slk-eng)":88.84,"Tatoeba (slv-eng)":85.99,"Tatoeba (spa-eng)":98.8,"Tatoeba (sqi-eng)":67.06,"Tatoeba (srp-eng)":92.54,"Tatoeba (swe-eng)":92.95,"Tatoeba (swg-eng)":64.75,"Tatoeba (swh-eng)":61.6,"Tatoeba (tam-eng)":72.83,"Tatoeba (tat-eng)":37.02,"Tatoeba (tel-eng)":42.79,"Tatoeba (tgl-eng)":93.14,"Tatoeba (tha-eng)":93.64,"Tatoeba (tuk-eng)":42.01,"Tatoeba (tur-eng)":94.13,"Tatoeba (tzl-eng)":49.51,"Tatoeba (uig-eng)":38.86,"Tatoeba (ukr-eng)":94.45,"Tatoeba (urd-eng)":83.62,"Tatoeba (uzb-eng)":52.73,"Tatoeba (vie-eng)":94.83,"Tatoeba (war-eng)":41.97,"Tatoeba (wuu-eng)":89.61,"Tatoeba (xho-eng)":35.96,"Tatoeba (yid-eng)":33.12,"Tatoeba (yue-eng)":89.11,"Tatoeba (zsm-eng)":94.1} -{"index":10,"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.09,"BUCC (de-en)":99.13,"BUCC (fr-en)":97.59,"BUCC (ru-en)":97.2,"BUCC (zh-en)":98.3,"Tatoeba (afr-eng)":86.91,"Tatoeba (amh-eng)":74.93,"Tatoeba (ang-eng)":29.07,"Tatoeba (ara-eng)":82.59,"Tatoeba (arq-eng)":26.74,"Tatoeba (arz-eng)":66.79,"Tatoeba (ast-eng)":75.31,"Tatoeba (awa-eng)":68.39,"Tatoeba (aze-eng)":84.71,"Tatoeba (bel-eng)":86.7,"Tatoeba (ben-eng)":81.05,"Tatoeba (ber-eng)":23.58,"Tatoeba (bos-eng)":88.86,"Tatoeba (bre-eng)":5.44,"Tatoeba (bul-eng)":88.63,"Tatoeba (cat-eng)":84.09,"Tatoeba (cbk-eng)":60.54,"Tatoeba 
(ceb-eng)":45.46,"Tatoeba (ces-eng)":88.88,"Tatoeba (cha-eng)":16.95,"Tatoeba (cmn-eng)":93.35,"Tatoeba (cor-eng)":4.38,"Tatoeba (csb-eng)":24.56,"Tatoeba (cym-eng)":65.22,"Tatoeba (dan-eng)":91.4,"Tatoeba (deu-eng)":97.07,"Tatoeba (dsb-eng)":34.33,"Tatoeba (dtp-eng)":5.13,"Tatoeba (ell-eng)":89.96,"Tatoeba (epo-eng)":92.07,"Tatoeba (est-eng)":70.51,"Tatoeba (eus-eng)":56.16,"Tatoeba (fao-eng)":64.72,"Tatoeba (fin-eng)":86.15,"Tatoeba (fra-eng)":92.76,"Tatoeba (fry-eng)":50.88,"Tatoeba (gla-eng)":43.08,"Tatoeba (gle-eng)":58.36,"Tatoeba (glg-eng)":82.79,"Tatoeba (gsw-eng)":43.53,"Tatoeba (heb-eng)":74.14,"Tatoeba (hin-eng)":93.0,"Tatoeba (hrv-eng)":92.5,"Tatoeba (hsb-eng)":40.35,"Tatoeba (hun-eng)":84.32,"Tatoeba (hye-eng)":85.91,"Tatoeba (ido-eng)":74.39,"Tatoeba (ile-eng)":72.43,"Tatoeba (ina-eng)":86.11,"Tatoeba (ind-eng)":90.26,"Tatoeba (isl-eng)":76.9,"Tatoeba (ita-eng)":90.61,"Tatoeba (jav-eng)":61.25,"Tatoeba (jpn-eng)":90.3,"Tatoeba (kab-eng)":21.98,"Tatoeba (kat-eng)":77.83,"Tatoeba (kaz-eng)":75.56,"Tatoeba (khm-eng)":47.26,"Tatoeba (kor-eng)":83.37,"Tatoeba (kur-eng)":52.96,"Tatoeba (kzj-eng)":6.2,"Tatoeba (lat-eng)":39.58,"Tatoeba (lfn-eng)":52.85,"Tatoeba (lit-eng)":75.53,"Tatoeba (lvs-eng)":76.66,"Tatoeba (mal-eng)":96.72,"Tatoeba (mar-eng)":86.62,"Tatoeba (max-eng)":52.39,"Tatoeba (mhr-eng)":5.52,"Tatoeba (mkd-eng)":73.76,"Tatoeba (mon-eng)":78.37,"Tatoeba (nds-eng)":53.86,"Tatoeba (nld-eng)":93.2,"Tatoeba (nno-eng)":82.56,"Tatoeba (nob-eng)":95.9,"Tatoeba (nov-eng)":66.83,"Tatoeba (oci-eng)":35.79,"Tatoeba (orv-eng)":16.0,"Tatoeba (pam-eng)":6.92,"Tatoeba (pes-eng)":87.08,"Tatoeba (pms-eng)":44.61,"Tatoeba (pol-eng)":94.82,"Tatoeba (por-eng)":92.74,"Tatoeba (ron-eng)":91.27,"Tatoeba (rus-eng)":91.78,"Tatoeba (slk-eng)":86.4,"Tatoeba (slv-eng)":81.93,"Tatoeba (spa-eng)":96.97,"Tatoeba (sqi-eng)":90.06,"Tatoeba (srp-eng)":89.08,"Tatoeba (swe-eng)":91.33,"Tatoeba (swg-eng)":42.33,"Tatoeba (swh-eng)":66.81,"Tatoeba (tam-eng)":85.12,"Tatoeba (tat-eng)":66.92,"Tatoeba (tel-eng)":88.49,"Tatoeba (tgl-eng)":83.78,"Tatoeba (tha-eng)":94.4,"Tatoeba (tuk-eng)":19.66,"Tatoeba (tur-eng)":92.67,"Tatoeba (tzl-eng)":34.44,"Tatoeba (uig-eng)":63.08,"Tatoeba (ukr-eng)":88.29,"Tatoeba (urd-eng)":86.2,"Tatoeba (uzb-eng)":62.63,"Tatoeba (vie-eng)":94.68,"Tatoeba (war-eng)":47.18,"Tatoeba (wuu-eng)":78.65,"Tatoeba (xho-eng)":73.24,"Tatoeba (yid-eng)":63.2,"Tatoeba (yue-eng)":80.66,"Tatoeba (zsm-eng)":92.45} -{"index":0,"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":67.42,"BUCC (de-en)":99.21,"BUCC (fr-en)":98.39,"BUCC (ru-en)":97.62,"BUCC (zh-en)":97.7,"Tatoeba (afr-eng)":92.59,"Tatoeba (amh-eng)":80.82,"Tatoeba (ang-eng)":25.22,"Tatoeba (ara-eng)":90.14,"Tatoeba (arq-eng)":26.63,"Tatoeba (arz-eng)":66.16,"Tatoeba (ast-eng)":76.35,"Tatoeba (awa-eng)":33.74,"Tatoeba (aze-eng)":82.41,"Tatoeba (bel-eng)":79.54,"Tatoeba (ben-eng)":89.43,"Tatoeba (ber-eng)":77.63,"Tatoeba (bos-eng)":95.86,"Tatoeba (bre-eng)":31.2,"Tatoeba (bul-eng)":93.57,"Tatoeba (cat-eng)":95.8,"Tatoeba (cbk-eng)":77.17,"Tatoeba (ceb-eng)":9.93,"Tatoeba (ces-eng)":95.52,"Tatoeba (cha-eng)":14.86,"Tatoeba (cmn-eng)":85.62,"Tatoeba (cor-eng)":4.45,"Tatoeba (csb-eng)":27.03,"Tatoeba (cym-eng)":5.85,"Tatoeba (dan-eng)":95.22,"Tatoeba (deu-eng)":99.07,"Tatoeba (dsb-eng)":42.34,"Tatoeba (dtp-eng)":7.39,"Tatoeba (ell-eng)":96.2,"Tatoeba (epo-eng)":96.61,"Tatoeba (est-eng)":96.43,"Tatoeba (eus-eng)":93.32,"Tatoeba (fao-eng)":57.04,"Tatoeba (fin-eng)":96.98,"Tatoeba 
(fra-eng)":94.28,"Tatoeba (fry-eng)":42.07,"Tatoeba (gla-eng)":1.52,"Tatoeba (gle-eng)":4.2,"Tatoeba (glg-eng)":96.14,"Tatoeba (gsw-eng)":27.52,"Tatoeba (heb-eng)":0.0,"Tatoeba (hin-eng)":95.32,"Tatoeba (hrv-eng)":96.72,"Tatoeba (hsb-eng)":45.75,"Tatoeba (hun-eng)":95.2,"Tatoeba (hye-eng)":88.72,"Tatoeba (ido-eng)":80.86,"Tatoeba (ile-eng)":87.88,"Tatoeba (ina-eng)":93.93,"Tatoeba (ind-eng)":92.98,"Tatoeba (isl-eng)":94.32,"Tatoeba (ita-eng)":94.32,"Tatoeba (jav-eng)":9.95,"Tatoeba (jpn-eng)":93.78,"Tatoeba (kab-eng)":65.88,"Tatoeba (kat-eng)":81.16,"Tatoeba (kaz-eng)":53.3,"Tatoeba (khm-eng)":74.19,"Tatoeba (kor-eng)":87.97,"Tatoeba (kur-eng)":19.09,"Tatoeba (kzj-eng)":4.46,"Tatoeba (lat-eng)":64.81,"Tatoeba (lfn-eng)":63.39,"Tatoeba (lit-eng)":96.2,"Tatoeba (lvs-eng)":95.33,"Tatoeba (mal-eng)":98.16,"Tatoeba (mar-eng)":92.93,"Tatoeba (max-eng)":36.96,"Tatoeba (mhr-eng)":6.86,"Tatoeba (mkd-eng)":93.63,"Tatoeba (mon-eng)":3.42,"Tatoeba (nds-eng)":77.13,"Tatoeba (nld-eng)":95.35,"Tatoeba (nno-eng)":72.75,"Tatoeba (nob-eng)":95.77,"Tatoeba (nov-eng)":60.02,"Tatoeba (oci-eng)":58.13,"Tatoeba (orv-eng)":23.24,"Tatoeba (pam-eng)":3.24,"Tatoeba (pes-eng)":93.13,"Tatoeba (pms-eng)":36.23,"Tatoeba (pol-eng)":97.32,"Tatoeba (por-eng)":94.54,"Tatoeba (ron-eng)":96.52,"Tatoeba (rus-eng)":92.58,"Tatoeba (slk-eng)":95.82,"Tatoeba (slv-eng)":95.4,"Tatoeba (spa-eng)":97.33,"Tatoeba (sqi-eng)":97.22,"Tatoeba (srp-eng)":93.64,"Tatoeba (swe-eng)":95.31,"Tatoeba (swg-eng)":33.1,"Tatoeba (swh-eng)":55.66,"Tatoeba (tam-eng)":87.32,"Tatoeba (tat-eng)":34.74,"Tatoeba (tel-eng)":96.72,"Tatoeba (tgl-eng)":63.19,"Tatoeba (tha-eng)":96.38,"Tatoeba (tuk-eng)":16.35,"Tatoeba (tur-eng)":98.03,"Tatoeba (tzl-eng)":36.56,"Tatoeba (uig-eng)":56.49,"Tatoeba (ukr-eng)":93.52,"Tatoeba (urd-eng)":84.23,"Tatoeba (uzb-eng)":23.2,"Tatoeba (vie-eng)":96.73,"Tatoeba (war-eng)":8.25,"Tatoeba (wuu-eng)":75.09,"Tatoeba (xho-eng)":4.68,"Tatoeba (yid-eng)":2.49,"Tatoeba (yue-eng)":87.75,"Tatoeba (zsm-eng)":95.41} -{"index":13,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.2,"BUCC (de-en)":97.86,"BUCC (fr-en)":92.66,"BUCC (ru-en)":93.5,"BUCC (zh-en)":88.79,"Tatoeba (afr-eng)":85.67,"Tatoeba (amh-eng)":76.13,"Tatoeba (ang-eng)":42.23,"Tatoeba (ara-eng)":72.55,"Tatoeba (arq-eng)":22.43,"Tatoeba (arz-eng)":55.14,"Tatoeba (ast-eng)":71.67,"Tatoeba (awa-eng)":74.27,"Tatoeba (aze-eng)":80.14,"Tatoeba (bel-eng)":84.68,"Tatoeba (ben-eng)":77.76,"Tatoeba (ber-eng)":16.92,"Tatoeba (bos-eng)":83.24,"Tatoeba (bre-eng)":8.24,"Tatoeba (bul-eng)":83.81,"Tatoeba (cat-eng)":79.75,"Tatoeba (cbk-eng)":57.77,"Tatoeba (ceb-eng)":42.01,"Tatoeba (ces-eng)":78.83,"Tatoeba (cha-eng)":27.83,"Tatoeba (cmn-eng)":88.89,"Tatoeba (cor-eng)":6.02,"Tatoeba (csb-eng)":23.99,"Tatoeba (cym-eng)":65.32,"Tatoeba (dan-eng)":84.39,"Tatoeba (deu-eng)":96.58,"Tatoeba (dsb-eng)":31.64,"Tatoeba (dtp-eng)":7.68,"Tatoeba (ell-eng)":84.08,"Tatoeba (epo-eng)":90.5,"Tatoeba (est-eng)":55.31,"Tatoeba (eus-eng)":54.47,"Tatoeba (fao-eng)":58.87,"Tatoeba (fin-eng)":68.56,"Tatoeba (fra-eng)":89.93,"Tatoeba (fry-eng)":51.03,"Tatoeba (gla-eng)":39.42,"Tatoeba (gle-eng)":59.59,"Tatoeba (glg-eng)":81.93,"Tatoeba (gsw-eng)":36.45,"Tatoeba (heb-eng)":67.92,"Tatoeba (hin-eng)":93.25,"Tatoeba (hrv-eng)":86.87,"Tatoeba (hsb-eng)":37.02,"Tatoeba (hun-eng)":71.84,"Tatoeba (hye-eng)":82.29,"Tatoeba (ido-eng)":70.86,"Tatoeba (ile-eng)":71.73,"Tatoeba (ina-eng)":85.6,"Tatoeba (ind-eng)":87.81,"Tatoeba 
(isl-eng)":62.16,"Tatoeba (ita-eng)":87.56,"Tatoeba (jav-eng)":50.66,"Tatoeba (jpn-eng)":83.7,"Tatoeba (kab-eng)":17.83,"Tatoeba (kat-eng)":76.84,"Tatoeba (kaz-eng)":73.03,"Tatoeba (khm-eng)":47.99,"Tatoeba (kor-eng)":74.16,"Tatoeba (kur-eng)":39.67,"Tatoeba (kzj-eng)":8.27,"Tatoeba (lat-eng)":39.95,"Tatoeba (lfn-eng)":52.73,"Tatoeba (lit-eng)":61.52,"Tatoeba (lvs-eng)":60.67,"Tatoeba (mal-eng)":94.78,"Tatoeba (mar-eng)":86.79,"Tatoeba (max-eng)":49.95,"Tatoeba (mhr-eng)":6.08,"Tatoeba (mkd-eng)":59.26,"Tatoeba (mon-eng)":78.22,"Tatoeba (nds-eng)":53.26,"Tatoeba (nld-eng)":90.3,"Tatoeba (nno-eng)":69.48,"Tatoeba (nob-eng)":87.8,"Tatoeba (nov-eng)":68.56,"Tatoeba (oci-eng)":42.84,"Tatoeba (orv-eng)":15.13,"Tatoeba (pam-eng)":6.97,"Tatoeba (pes-eng)":84.0,"Tatoeba (pms-eng)":42.15,"Tatoeba (pol-eng)":85.91,"Tatoeba (por-eng)":89.25,"Tatoeba (ron-eng)":83.67,"Tatoeba (rus-eng)":87.96,"Tatoeba (slk-eng)":79.96,"Tatoeba (slv-eng)":72.85,"Tatoeba (spa-eng)":91.1,"Tatoeba (sqi-eng)":88.56,"Tatoeba (srp-eng)":80.33,"Tatoeba (swe-eng)":84.97,"Tatoeba (swg-eng)":44.3,"Tatoeba (swh-eng)":66.74,"Tatoeba (tam-eng)":81.39,"Tatoeba (tat-eng)":63.38,"Tatoeba (tel-eng)":85.78,"Tatoeba (tgl-eng)":79.26,"Tatoeba (tha-eng)":89.36,"Tatoeba (tuk-eng)":18.41,"Tatoeba (tur-eng)":86.78,"Tatoeba (tzl-eng)":36.2,"Tatoeba (uig-eng)":59.33,"Tatoeba (ukr-eng)":81.62,"Tatoeba (urd-eng)":84.71,"Tatoeba (uzb-eng)":58.0,"Tatoeba (vie-eng)":87.78,"Tatoeba (war-eng)":40.86,"Tatoeba (wuu-eng)":69.05,"Tatoeba (xho-eng)":62.84,"Tatoeba (yid-eng)":66.83,"Tatoeba (yue-eng)":69.69,"Tatoeba (zsm-eng)":90.0} -{"index":24,"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":63.38,"BUCC (de-en)":98.59,"BUCC (fr-en)":96.89,"BUCC (ru-en)":96.44,"BUCC (zh-en)":97.56,"Tatoeba (afr-eng)":72.96,"Tatoeba (amh-eng)":53.49,"Tatoeba (ang-eng)":16.72,"Tatoeba (ara-eng)":90.19,"Tatoeba (arq-eng)":19.84,"Tatoeba (arz-eng)":55.69,"Tatoeba (ast-eng)":70.08,"Tatoeba (awa-eng)":42.83,"Tatoeba (aze-eng)":76.36,"Tatoeba (bel-eng)":79.94,"Tatoeba (ben-eng)":64.9,"Tatoeba (ber-eng)":4.88,"Tatoeba (bos-eng)":94.02,"Tatoeba (bre-eng)":6.42,"Tatoeba (bul-eng)":93.52,"Tatoeba (cat-eng)":96.05,"Tatoeba (cbk-eng)":58.68,"Tatoeba (ceb-eng)":7.39,"Tatoeba (ces-eng)":95.73,"Tatoeba (cha-eng)":12.59,"Tatoeba (cmn-eng)":95.83,"Tatoeba (cor-eng)":3.53,"Tatoeba (csb-eng)":23.73,"Tatoeba (cym-eng)":22.31,"Tatoeba (dan-eng)":96.17,"Tatoeba (deu-eng)":97.73,"Tatoeba (dsb-eng)":36.85,"Tatoeba (dtp-eng)":5.03,"Tatoeba (ell-eng)":94.93,"Tatoeba (epo-eng)":55.12,"Tatoeba (est-eng)":98.4,"Tatoeba (eus-eng)":31.33,"Tatoeba (fao-eng)":38.24,"Tatoeba (fin-eng)":95.92,"Tatoeba (fra-eng)":93.12,"Tatoeba (fry-eng)":43.54,"Tatoeba (gla-eng)":4.72,"Tatoeba (gle-eng)":16.85,"Tatoeba (glg-eng)":95.32,"Tatoeba (gsw-eng)":25.12,"Tatoeba (heb-eng)":88.26,"Tatoeba (hin-eng)":97.75,"Tatoeba (hrv-eng)":97.0,"Tatoeba (hsb-eng)":44.32,"Tatoeba (hun-eng)":94.18,"Tatoeba (hye-eng)":94.38,"Tatoeba (ido-eng)":43.91,"Tatoeba (ile-eng)":60.36,"Tatoeba (ina-eng)":84.32,"Tatoeba (ind-eng)":93.5,"Tatoeba (isl-eng)":59.25,"Tatoeba (ita-eng)":93.76,"Tatoeba (jav-eng)":23.39,"Tatoeba (jpn-eng)":92.51,"Tatoeba (kab-eng)":1.41,"Tatoeba (kat-eng)":95.46,"Tatoeba (kaz-eng)":61.49,"Tatoeba (khm-eng)":58.8,"Tatoeba (kor-eng)":93.07,"Tatoeba (kur-eng)":61.44,"Tatoeba (kzj-eng)":5.88,"Tatoeba (lat-eng)":24.25,"Tatoeba (lfn-eng)":49.56,"Tatoeba (lit-eng)":95.37,"Tatoeba (lvs-eng)":97.53,"Tatoeba (mal-eng)":88.46,"Tatoeba 
(mar-eng)":93.83,"Tatoeba (max-eng)":48.77,"Tatoeba (mhr-eng)":7.57,"Tatoeba (mkd-eng)":93.02,"Tatoeba (mon-eng)":96.14,"Tatoeba (nds-eng)":38.88,"Tatoeba (nld-eng)":95.5,"Tatoeba (nno-eng)":81.41,"Tatoeba (nob-eng)":98.53,"Tatoeba (nov-eng)":50.23,"Tatoeba (oci-eng)":43.49,"Tatoeba (orv-eng)":23.77,"Tatoeba (pam-eng)":5.39,"Tatoeba (pes-eng)":93.47,"Tatoeba (pms-eng)":34.19,"Tatoeba (pol-eng)":96.95,"Tatoeba (por-eng)":93.02,"Tatoeba (ron-eng)":96.43,"Tatoeba (rus-eng)":92.92,"Tatoeba (slk-eng)":96.62,"Tatoeba (slv-eng)":97.08,"Tatoeba (spa-eng)":97.0,"Tatoeba (sqi-eng)":98.57,"Tatoeba (srp-eng)":94.12,"Tatoeba (swe-eng)":95.45,"Tatoeba (swg-eng)":22.8,"Tatoeba (swh-eng)":16.02,"Tatoeba (tam-eng)":73.6,"Tatoeba (tat-eng)":10.89,"Tatoeba (tel-eng)":79.73,"Tatoeba (tgl-eng)":17.67,"Tatoeba (tha-eng)":95.99,"Tatoeba (tuk-eng)":14.91,"Tatoeba (tur-eng)":96.17,"Tatoeba (tzl-eng)":34.21,"Tatoeba (uig-eng)":48.35,"Tatoeba (ukr-eng)":92.67,"Tatoeba (urd-eng)":95.12,"Tatoeba (uzb-eng)":23.19,"Tatoeba (vie-eng)":97.23,"Tatoeba (war-eng)":7.42,"Tatoeba (wuu-eng)":78.25,"Tatoeba (xho-eng)":6.53,"Tatoeba (yid-eng)":30.73,"Tatoeba (yue-eng)":77.58,"Tatoeba (zsm-eng)":95.8} -{"index":23,"Rank":10,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":57.98,"BUCC (de-en)":97.11,"BUCC (fr-en)":94.99,"BUCC (ru-en)":95.06,"BUCC (zh-en)":95.63,"Tatoeba (afr-eng)":58.22,"Tatoeba (amh-eng)":36.21,"Tatoeba (ang-eng)":10.24,"Tatoeba (ara-eng)":87.93,"Tatoeba (arq-eng)":18.6,"Tatoeba (arz-eng)":51.26,"Tatoeba (ast-eng)":62.17,"Tatoeba (awa-eng)":33.43,"Tatoeba (aze-eng)":62.1,"Tatoeba (bel-eng)":67.73,"Tatoeba (ben-eng)":36.48,"Tatoeba (ber-eng)":4.43,"Tatoeba (bos-eng)":93.27,"Tatoeba (bre-eng)":5.56,"Tatoeba (bul-eng)":92.65,"Tatoeba (cat-eng)":94.42,"Tatoeba (cbk-eng)":55.37,"Tatoeba (ceb-eng)":8.05,"Tatoeba (ces-eng)":95.12,"Tatoeba (cha-eng)":15.98,"Tatoeba (cmn-eng)":94.93,"Tatoeba (cor-eng)":3.42,"Tatoeba (csb-eng)":21.56,"Tatoeba (cym-eng)":13.25,"Tatoeba (dan-eng)":94.8,"Tatoeba (deu-eng)":97.02,"Tatoeba (dsb-eng)":33.43,"Tatoeba (dtp-eng)":5.69,"Tatoeba (ell-eng)":95.43,"Tatoeba (epo-eng)":41.73,"Tatoeba (est-eng)":97.33,"Tatoeba (eus-eng)":23.18,"Tatoeba (fao-eng)":27.51,"Tatoeba (fin-eng)":93.1,"Tatoeba (fra-eng)":91.72,"Tatoeba (fry-eng)":31.13,"Tatoeba (gla-eng)":3.61,"Tatoeba (gle-eng)":11.62,"Tatoeba (glg-eng)":94.0,"Tatoeba (gsw-eng)":25.74,"Tatoeba (heb-eng)":86.88,"Tatoeba (hin-eng)":97.62,"Tatoeba (hrv-eng)":95.98,"Tatoeba (hsb-eng)":36.1,"Tatoeba (hun-eng)":91.58,"Tatoeba (hye-eng)":93.28,"Tatoeba (ido-eng)":40.25,"Tatoeba (ile-eng)":57.71,"Tatoeba (ina-eng)":79.13,"Tatoeba (ind-eng)":92.74,"Tatoeba (isl-eng)":24.07,"Tatoeba (ita-eng)":93.05,"Tatoeba (jav-eng)":17.04,"Tatoeba (jpn-eng)":90.41,"Tatoeba (kab-eng)":1.16,"Tatoeba (kat-eng)":95.44,"Tatoeba (kaz-eng)":34.89,"Tatoeba (khm-eng)":32.11,"Tatoeba (kor-eng)":92.52,"Tatoeba (kur-eng)":46.94,"Tatoeba (kzj-eng)":6.24,"Tatoeba (lat-eng)":19.47,"Tatoeba (lfn-eng)":47.02,"Tatoeba (lit-eng)":93.16,"Tatoeba (lvs-eng)":97.87,"Tatoeba (mal-eng)":32.2,"Tatoeba (mar-eng)":92.38,"Tatoeba (max-eng)":45.25,"Tatoeba (mhr-eng)":6.89,"Tatoeba (mkd-eng)":91.0,"Tatoeba (mon-eng)":95.04,"Tatoeba (nds-eng)":32.16,"Tatoeba (nld-eng)":94.58,"Tatoeba (nno-eng)":76.34,"Tatoeba (nob-eng)":97.73,"Tatoeba (nov-eng)":47.99,"Tatoeba (oci-eng)":38.57,"Tatoeba (orv-eng)":15.1,"Tatoeba (pam-eng)":5.41,"Tatoeba (pes-eng)":92.59,"Tatoeba (pms-eng)":30.7,"Tatoeba (pol-eng)":94.28,"Tatoeba 
(por-eng)":92.13,"Tatoeba (ron-eng)":95.3,"Tatoeba (rus-eng)":91.87,"Tatoeba (slk-eng)":95.15,"Tatoeba (slv-eng)":96.92,"Tatoeba (spa-eng)":95.42,"Tatoeba (sqi-eng)":98.17,"Tatoeba (srp-eng)":92.24,"Tatoeba (swe-eng)":94.42,"Tatoeba (swg-eng)":26.31,"Tatoeba (swh-eng)":14.48,"Tatoeba (tam-eng)":24.64,"Tatoeba (tat-eng)":10.25,"Tatoeba (tel-eng)":36.4,"Tatoeba (tgl-eng)":13.09,"Tatoeba (tha-eng)":96.72,"Tatoeba (tuk-eng)":15.16,"Tatoeba (tur-eng)":95.08,"Tatoeba (tzl-eng)":25.46,"Tatoeba (uig-eng)":24.39,"Tatoeba (ukr-eng)":92.82,"Tatoeba (urd-eng)":94.57,"Tatoeba (uzb-eng)":17.14,"Tatoeba (vie-eng)":95.12,"Tatoeba (war-eng)":7.25,"Tatoeba (wuu-eng)":76.0,"Tatoeba (xho-eng)":4.52,"Tatoeba (yid-eng)":14.38,"Tatoeba (yue-eng)":71.45,"Tatoeba (zsm-eng)":95.31} -{"index":5,"Rank":11,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.27,"BUCC (de-en)":98.18,"BUCC (fr-en)":98.04,"BUCC (ru-en)":94.58,"BUCC (zh-en)":96.75,"Tatoeba (afr-eng)":50.58,"Tatoeba (amh-eng)":0.36,"Tatoeba (ang-eng)":30.89,"Tatoeba (ara-eng)":79.65,"Tatoeba (arq-eng)":20.68,"Tatoeba (arz-eng)":56.04,"Tatoeba (ast-eng)":55.81,"Tatoeba (awa-eng)":41.58,"Tatoeba (aze-eng)":27.78,"Tatoeba (bel-eng)":41.21,"Tatoeba (ben-eng)":20.85,"Tatoeba (ber-eng)":5.5,"Tatoeba (bos-eng)":36.73,"Tatoeba (bre-eng)":5.46,"Tatoeba (bul-eng)":60.26,"Tatoeba (cat-eng)":63.42,"Tatoeba (cbk-eng)":56.73,"Tatoeba (ceb-eng)":10.46,"Tatoeba (ces-eng)":81.81,"Tatoeba (cha-eng)":22.8,"Tatoeba (cmn-eng)":86.13,"Tatoeba (cor-eng)":3.87,"Tatoeba (csb-eng)":20.52,"Tatoeba (cym-eng)":9.1,"Tatoeba (dan-eng)":72.49,"Tatoeba (deu-eng)":96.57,"Tatoeba (dsb-eng)":28.3,"Tatoeba (dtp-eng)":3.87,"Tatoeba (ell-eng)":10.43,"Tatoeba (epo-eng)":30.54,"Tatoeba (est-eng)":7.43,"Tatoeba (eus-eng)":11.67,"Tatoeba (fao-eng)":21.34,"Tatoeba (fin-eng)":18.11,"Tatoeba (fra-eng)":89.39,"Tatoeba (fry-eng)":37.73,"Tatoeba (gla-eng)":4.23,"Tatoeba (gle-eng)":4.4,"Tatoeba (glg-eng)":74.27,"Tatoeba (gsw-eng)":37.04,"Tatoeba (heb-eng)":72.4,"Tatoeba (hin-eng)":82.06,"Tatoeba (hrv-eng)":34.63,"Tatoeba (hsb-eng)":37.13,"Tatoeba (hun-eng)":13.18,"Tatoeba (hye-eng)":0.37,"Tatoeba (ido-eng)":48.29,"Tatoeba (ile-eng)":61.81,"Tatoeba (ina-eng)":78.25,"Tatoeba (ind-eng)":54.64,"Tatoeba (isl-eng)":12.45,"Tatoeba (ita-eng)":81.99,"Tatoeba (jav-eng)":9.0,"Tatoeba (jpn-eng)":86.92,"Tatoeba (kab-eng)":1.95,"Tatoeba (kat-eng)":0.76,"Tatoeba (kaz-eng)":9.15,"Tatoeba (khm-eng)":0.35,"Tatoeba (kor-eng)":75.76,"Tatoeba (kur-eng)":13.51,"Tatoeba (kzj-eng)":5.8,"Tatoeba (lat-eng)":21.15,"Tatoeba (lfn-eng)":47.49,"Tatoeba (lit-eng)":10.45,"Tatoeba (lvs-eng)":10.72,"Tatoeba (mal-eng)":74.47,"Tatoeba (mar-eng)":67.35,"Tatoeba (max-eng)":22.52,"Tatoeba (mhr-eng)":4.3,"Tatoeba (mkd-eng)":34.72,"Tatoeba (mon-eng)":10.95,"Tatoeba (nds-eng)":49.47,"Tatoeba (nld-eng)":87.22,"Tatoeba (nno-eng)":56.01,"Tatoeba (nob-eng)":79.21,"Tatoeba (nov-eng)":54.06,"Tatoeba (oci-eng)":39.12,"Tatoeba (orv-eng)":19.65,"Tatoeba (pam-eng)":4.84,"Tatoeba (pes-eng)":45.04,"Tatoeba (pms-eng)":34.32,"Tatoeba (pol-eng)":83.35,"Tatoeba (por-eng)":89.38,"Tatoeba (ron-eng)":59.11,"Tatoeba (rus-eng)":84.96,"Tatoeba (slk-eng)":65.42,"Tatoeba (slv-eng)":35.88,"Tatoeba (spa-eng)":93.12,"Tatoeba (sqi-eng)":15.39,"Tatoeba (srp-eng)":35.41,"Tatoeba (swe-eng)":86.33,"Tatoeba (swg-eng)":36.39,"Tatoeba (swh-eng)":7.78,"Tatoeba (tam-eng)":65.79,"Tatoeba (tat-eng)":6.67,"Tatoeba (tel-eng)":65.37,"Tatoeba (tgl-eng)":17.86,"Tatoeba (tha-eng)":3.16,"Tatoeba (tuk-eng)":9.61,"Tatoeba 
(tur-eng)":77.38,"Tatoeba (tzl-eng)":26.1,"Tatoeba (uig-eng)":1.31,"Tatoeba (ukr-eng)":65.33,"Tatoeba (urd-eng)":6.6,"Tatoeba (uzb-eng)":7.86,"Tatoeba (vie-eng)":50.44,"Tatoeba (war-eng)":10.67,"Tatoeba (wuu-eng)":70.58,"Tatoeba (xho-eng)":9.53,"Tatoeba (yid-eng)":1.46,"Tatoeba (yue-eng)":51.32,"Tatoeba (zsm-eng)":54.61} -{"index":17,"Rank":12,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.12,"BUCC (de-en)":87.71,"BUCC (fr-en)":98.41,"BUCC (ru-en)":78.75,"BUCC (zh-en)":98.77,"Tatoeba (afr-eng)":22.56,"Tatoeba (amh-eng)":0.1,"Tatoeba (ang-eng)":34.68,"Tatoeba (ara-eng)":90.62,"Tatoeba (arq-eng)":37.21,"Tatoeba (arz-eng)":74.8,"Tatoeba (ast-eng)":68.37,"Tatoeba (awa-eng)":53.01,"Tatoeba (aze-eng)":8.16,"Tatoeba (bel-eng)":18.49,"Tatoeba (ben-eng)":83.89,"Tatoeba (ber-eng)":5.17,"Tatoeba (bos-eng)":19.28,"Tatoeba (bre-eng)":7.41,"Tatoeba (bul-eng)":28.46,"Tatoeba (cat-eng)":90.24,"Tatoeba (cbk-eng)":61.49,"Tatoeba (ceb-eng)":9.08,"Tatoeba (ces-eng)":12.99,"Tatoeba (cha-eng)":26.96,"Tatoeba (cmn-eng)":95.7,"Tatoeba (cor-eng)":3.86,"Tatoeba (csb-eng)":11.04,"Tatoeba (cym-eng)":8.32,"Tatoeba (dan-eng)":33.6,"Tatoeba (deu-eng)":85.06,"Tatoeba (dsb-eng)":15.0,"Tatoeba (dtp-eng)":5.26,"Tatoeba (ell-eng)":13.08,"Tatoeba (epo-eng)":33.92,"Tatoeba (est-eng)":5.61,"Tatoeba (eus-eng)":74.53,"Tatoeba (fao-eng)":14.42,"Tatoeba (fin-eng)":5.48,"Tatoeba (fra-eng)":94.32,"Tatoeba (fry-eng)":32.37,"Tatoeba (gla-eng)":3.1,"Tatoeba (gle-eng)":3.91,"Tatoeba (glg-eng)":82.55,"Tatoeba (gsw-eng)":23.7,"Tatoeba (heb-eng)":7.49,"Tatoeba (hin-eng)":94.88,"Tatoeba (hrv-eng)":17.75,"Tatoeba (hsb-eng)":13.31,"Tatoeba (hun-eng)":6.65,"Tatoeba (hye-eng)":0.92,"Tatoeba (ido-eng)":47.84,"Tatoeba (ile-eng)":63.65,"Tatoeba (ina-eng)":83.73,"Tatoeba (ind-eng)":91.91,"Tatoeba (isl-eng)":8.0,"Tatoeba (ita-eng)":74.96,"Tatoeba (jav-eng)":22.26,"Tatoeba (jpn-eng)":83.88,"Tatoeba (kab-eng)":1.54,"Tatoeba (kat-eng)":1.48,"Tatoeba (kaz-eng)":9.66,"Tatoeba (khm-eng)":0.7,"Tatoeba (kor-eng)":44.79,"Tatoeba (kur-eng)":12.5,"Tatoeba (kzj-eng)":6.3,"Tatoeba (lat-eng)":46.75,"Tatoeba (lfn-eng)":46.48,"Tatoeba (lit-eng)":5.87,"Tatoeba (lvs-eng)":8.58,"Tatoeba (mal-eng)":95.05,"Tatoeba (mar-eng)":71.56,"Tatoeba (max-eng)":41.45,"Tatoeba (mhr-eng)":3.25,"Tatoeba (mkd-eng)":15.58,"Tatoeba (mon-eng)":7.37,"Tatoeba (nds-eng)":29.88,"Tatoeba (nld-eng)":49.85,"Tatoeba (nno-eng)":20.79,"Tatoeba (nob-eng)":31.94,"Tatoeba (nov-eng)":54.93,"Tatoeba (oci-eng)":45.74,"Tatoeba (orv-eng)":8.44,"Tatoeba (pam-eng)":7.03,"Tatoeba (pes-eng)":21.59,"Tatoeba (pms-eng)":36.58,"Tatoeba (pol-eng)":20.27,"Tatoeba (por-eng)":94.43,"Tatoeba (ron-eng)":33.85,"Tatoeba (rus-eng)":71.23,"Tatoeba (slk-eng)":12.3,"Tatoeba (slv-eng)":13.06,"Tatoeba (spa-eng)":98.13,"Tatoeba (sqi-eng)":15.33,"Tatoeba (srp-eng)":18.75,"Tatoeba (swe-eng)":27.82,"Tatoeba (swg-eng)":29.18,"Tatoeba (swh-eng)":27.81,"Tatoeba (tam-eng)":81.87,"Tatoeba (tat-eng)":4.96,"Tatoeba (tel-eng)":80.56,"Tatoeba (tgl-eng)":11.19,"Tatoeba (tha-eng)":6.78,"Tatoeba (tuk-eng)":6.85,"Tatoeba (tur-eng)":8.96,"Tatoeba (tzl-eng)":32.7,"Tatoeba (uig-eng)":2.02,"Tatoeba (ukr-eng)":31.12,"Tatoeba (urd-eng)":82.87,"Tatoeba (uzb-eng)":8.28,"Tatoeba (vie-eng)":95.83,"Tatoeba (war-eng)":11.23,"Tatoeba (wuu-eng)":87.91,"Tatoeba (xho-eng)":7.96,"Tatoeba (yid-eng)":0.49,"Tatoeba (yue-eng)":89.39,"Tatoeba (zsm-eng)":88.11} -{"index":15,"Rank":13,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.17,"BUCC 
(de-en)":71.06,"BUCC (fr-en)":98.17,"BUCC (ru-en)":58.45,"BUCC (zh-en)":98.6,"Tatoeba (afr-eng)":18.66,"Tatoeba (amh-eng)":0.46,"Tatoeba (ang-eng)":27.44,"Tatoeba (ara-eng)":88.21,"Tatoeba (arq-eng)":29.24,"Tatoeba (arz-eng)":73.17,"Tatoeba (ast-eng)":71.25,"Tatoeba (awa-eng)":46.87,"Tatoeba (aze-eng)":7.61,"Tatoeba (bel-eng)":12.17,"Tatoeba (ben-eng)":80.65,"Tatoeba (ber-eng)":5.53,"Tatoeba (bos-eng)":14.99,"Tatoeba (bre-eng)":6.14,"Tatoeba (bul-eng)":26.9,"Tatoeba (cat-eng)":91.17,"Tatoeba (cbk-eng)":63.69,"Tatoeba (ceb-eng)":8.15,"Tatoeba (ces-eng)":8.29,"Tatoeba (cha-eng)":22.58,"Tatoeba (cmn-eng)":95.08,"Tatoeba (cor-eng)":4.0,"Tatoeba (csb-eng)":10.26,"Tatoeba (cym-eng)":8.46,"Tatoeba (dan-eng)":24.91,"Tatoeba (deu-eng)":72.73,"Tatoeba (dsb-eng)":12.44,"Tatoeba (dtp-eng)":5.41,"Tatoeba (ell-eng)":6.72,"Tatoeba (epo-eng)":28.07,"Tatoeba (est-eng)":5.63,"Tatoeba (eus-eng)":63.81,"Tatoeba (fao-eng)":12.9,"Tatoeba (fin-eng)":5.83,"Tatoeba (fra-eng)":93.53,"Tatoeba (fry-eng)":29.09,"Tatoeba (gla-eng)":4.13,"Tatoeba (gle-eng)":4.53,"Tatoeba (glg-eng)":80.98,"Tatoeba (gsw-eng)":19.31,"Tatoeba (heb-eng)":1.92,"Tatoeba (hin-eng)":92.87,"Tatoeba (hrv-eng)":13.69,"Tatoeba (hsb-eng)":12.35,"Tatoeba (hun-eng)":5.57,"Tatoeba (hye-eng)":1.56,"Tatoeba (ido-eng)":45.82,"Tatoeba (ile-eng)":64.41,"Tatoeba (ina-eng)":83.88,"Tatoeba (ind-eng)":90.54,"Tatoeba (isl-eng)":6.93,"Tatoeba (ita-eng)":73.39,"Tatoeba (jav-eng)":19.8,"Tatoeba (jpn-eng)":73.95,"Tatoeba (kab-eng)":1.92,"Tatoeba (kat-eng)":0.71,"Tatoeba (kaz-eng)":6.75,"Tatoeba (khm-eng)":0.09,"Tatoeba (kor-eng)":29.16,"Tatoeba (kur-eng)":11.84,"Tatoeba (kzj-eng)":5.99,"Tatoeba (lat-eng)":38.34,"Tatoeba (lfn-eng)":48.25,"Tatoeba (lit-eng)":5.82,"Tatoeba (lvs-eng)":6.95,"Tatoeba (mal-eng)":93.12,"Tatoeba (mar-eng)":65.18,"Tatoeba (max-eng)":40.88,"Tatoeba (mhr-eng)":2.88,"Tatoeba (mkd-eng)":13.93,"Tatoeba (mon-eng)":4.48,"Tatoeba (nds-eng)":24.45,"Tatoeba (nld-eng)":36.81,"Tatoeba (nno-eng)":16.07,"Tatoeba (nob-eng)":22.79,"Tatoeba (nov-eng)":53.3,"Tatoeba (oci-eng)":41.08,"Tatoeba (orv-eng)":5.61,"Tatoeba (pam-eng)":7.18,"Tatoeba (pes-eng)":17.01,"Tatoeba (pms-eng)":32.19,"Tatoeba (pol-eng)":14.83,"Tatoeba (por-eng)":93.62,"Tatoeba (ron-eng)":30.35,"Tatoeba (rus-eng)":62.61,"Tatoeba (slk-eng)":11.36,"Tatoeba (slv-eng)":10.89,"Tatoeba (spa-eng)":97.77,"Tatoeba (sqi-eng)":13.17,"Tatoeba (srp-eng)":15.09,"Tatoeba (swe-eng)":21.73,"Tatoeba (swg-eng)":22.77,"Tatoeba (swh-eng)":25.43,"Tatoeba (tam-eng)":84.66,"Tatoeba (tat-eng)":4.92,"Tatoeba (tel-eng)":79.3,"Tatoeba (tgl-eng)":10.75,"Tatoeba (tha-eng)":3.08,"Tatoeba (tuk-eng)":5.83,"Tatoeba (tur-eng)":7.14,"Tatoeba (tzl-eng)":31.64,"Tatoeba (uig-eng)":1.0,"Tatoeba (ukr-eng)":25.42,"Tatoeba (urd-eng)":76.77,"Tatoeba (uzb-eng)":5.91,"Tatoeba (vie-eng)":96.05,"Tatoeba (war-eng)":10.73,"Tatoeba (wuu-eng)":84.6,"Tatoeba (xho-eng)":7.79,"Tatoeba (yid-eng)":0.28,"Tatoeba (yue-eng)":86.38,"Tatoeba (zsm-eng)":85.22} -{"index":7,"Rank":14,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.08,"BUCC (de-en)":54.0,"BUCC (fr-en)":97.06,"BUCC (ru-en)":45.3,"BUCC (zh-en)":97.96,"Tatoeba (afr-eng)":16.62,"Tatoeba (amh-eng)":0.03,"Tatoeba (ang-eng)":28.76,"Tatoeba (ara-eng)":85.37,"Tatoeba (arq-eng)":27.75,"Tatoeba (arz-eng)":70.66,"Tatoeba (ast-eng)":71.13,"Tatoeba (awa-eng)":35.01,"Tatoeba (aze-eng)":6.32,"Tatoeba (bel-eng)":8.03,"Tatoeba (ben-eng)":75.98,"Tatoeba (ber-eng)":4.92,"Tatoeba (bos-eng)":13.65,"Tatoeba (bre-eng)":4.67,"Tatoeba 
(bul-eng)":20.09,"Tatoeba (cat-eng)":88.31,"Tatoeba (cbk-eng)":64.63,"Tatoeba (ceb-eng)":6.64,"Tatoeba (ces-eng)":9.55,"Tatoeba (cha-eng)":23.26,"Tatoeba (cmn-eng)":91.45,"Tatoeba (cor-eng)":2.83,"Tatoeba (csb-eng)":10.03,"Tatoeba (cym-eng)":6.97,"Tatoeba (dan-eng)":23.52,"Tatoeba (deu-eng)":70.1,"Tatoeba (dsb-eng)":8.78,"Tatoeba (dtp-eng)":3.41,"Tatoeba (ell-eng)":5.34,"Tatoeba (epo-eng)":26.2,"Tatoeba (est-eng)":4.76,"Tatoeba (eus-eng)":53.38,"Tatoeba (fao-eng)":12.61,"Tatoeba (fin-eng)":3.41,"Tatoeba (fra-eng)":91.44,"Tatoeba (fry-eng)":24.62,"Tatoeba (gla-eng)":2.09,"Tatoeba (gle-eng)":3.26,"Tatoeba (glg-eng)":79.86,"Tatoeba (gsw-eng)":21.03,"Tatoeba (heb-eng)":1.69,"Tatoeba (hin-eng)":85.23,"Tatoeba (hrv-eng)":12.79,"Tatoeba (hsb-eng)":9.68,"Tatoeba (hun-eng)":5.07,"Tatoeba (hye-eng)":0.5,"Tatoeba (ido-eng)":43.91,"Tatoeba (ile-eng)":59.59,"Tatoeba (ina-eng)":73.67,"Tatoeba (ind-eng)":88.04,"Tatoeba (isl-eng)":6.29,"Tatoeba (ita-eng)":65.04,"Tatoeba (jav-eng)":15.02,"Tatoeba (jpn-eng)":71.36,"Tatoeba (kab-eng)":1.69,"Tatoeba (kat-eng)":0.42,"Tatoeba (kaz-eng)":3.32,"Tatoeba (khm-eng)":0.37,"Tatoeba (kor-eng)":22.39,"Tatoeba (kur-eng)":8.26,"Tatoeba (kzj-eng)":5.17,"Tatoeba (lat-eng)":28.76,"Tatoeba (lfn-eng)":44.85,"Tatoeba (lit-eng)":4.49,"Tatoeba (lvs-eng)":6.55,"Tatoeba (mal-eng)":83.3,"Tatoeba (mar-eng)":45.53,"Tatoeba (max-eng)":36.14,"Tatoeba (mhr-eng)":1.56,"Tatoeba (mkd-eng)":10.47,"Tatoeba (mon-eng)":2.85,"Tatoeba (nds-eng)":23.92,"Tatoeba (nld-eng)":29.74,"Tatoeba (nno-eng)":16.28,"Tatoeba (nob-eng)":21.07,"Tatoeba (nov-eng)":52.23,"Tatoeba (oci-eng)":40.17,"Tatoeba (orv-eng)":5.79,"Tatoeba (pam-eng)":5.85,"Tatoeba (pes-eng)":12.13,"Tatoeba (pms-eng)":31.94,"Tatoeba (pol-eng)":14.09,"Tatoeba (por-eng)":92.62,"Tatoeba (ron-eng)":27.23,"Tatoeba (rus-eng)":59.84,"Tatoeba (slk-eng)":9.98,"Tatoeba (slv-eng)":10.14,"Tatoeba (spa-eng)":94.48,"Tatoeba (sqi-eng)":10.38,"Tatoeba (srp-eng)":11.69,"Tatoeba (swe-eng)":19.53,"Tatoeba (swg-eng)":16.89,"Tatoeba (swh-eng)":16.74,"Tatoeba (tam-eng)":72.76,"Tatoeba (tat-eng)":3.59,"Tatoeba (tel-eng)":64.62,"Tatoeba (tgl-eng)":10.7,"Tatoeba (tha-eng)":2.22,"Tatoeba (tuk-eng)":5.48,"Tatoeba (tur-eng)":6.15,"Tatoeba (tzl-eng)":27.82,"Tatoeba (uig-eng)":1.27,"Tatoeba (ukr-eng)":22.06,"Tatoeba (urd-eng)":70.1,"Tatoeba (uzb-eng)":4.71,"Tatoeba (vie-eng)":94.2,"Tatoeba (war-eng)":10.38,"Tatoeba (wuu-eng)":79.58,"Tatoeba (xho-eng)":5.51,"Tatoeba (yid-eng)":0.16,"Tatoeba (yue-eng)":77.03,"Tatoeba (zsm-eng)":79.95} -{"index":14,"Rank":15,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.85,"BUCC (de-en)":42.03,"BUCC (fr-en)":97.71,"BUCC (ru-en)":24.11,"BUCC (zh-en)":98.24,"Tatoeba (afr-eng)":14.54,"Tatoeba (amh-eng)":0.12,"Tatoeba (ang-eng)":19.43,"Tatoeba (ara-eng)":86.43,"Tatoeba (arq-eng)":26.28,"Tatoeba (arz-eng)":66.01,"Tatoeba (ast-eng)":68.16,"Tatoeba (awa-eng)":45.66,"Tatoeba (aze-eng)":6.63,"Tatoeba (bel-eng)":9.62,"Tatoeba (ben-eng)":78.76,"Tatoeba (ber-eng)":4.58,"Tatoeba (bos-eng)":11.93,"Tatoeba (bre-eng)":5.35,"Tatoeba (bul-eng)":14.25,"Tatoeba (cat-eng)":84.54,"Tatoeba (cbk-eng)":61.04,"Tatoeba (ceb-eng)":7.75,"Tatoeba (ces-eng)":5.64,"Tatoeba (cha-eng)":22.9,"Tatoeba (cmn-eng)":93.97,"Tatoeba (cor-eng)":3.13,"Tatoeba (csb-eng)":9.04,"Tatoeba (cym-eng)":7.77,"Tatoeba (dan-eng)":17.55,"Tatoeba (deu-eng)":53.27,"Tatoeba (dsb-eng)":9.57,"Tatoeba (dtp-eng)":3.82,"Tatoeba (ell-eng)":2.34,"Tatoeba (epo-eng)":23.11,"Tatoeba (est-eng)":4.47,"Tatoeba (eus-eng)":59.64,"Tatoeba 
(fao-eng)":9.42,"Tatoeba (fin-eng)":4.27,"Tatoeba (fra-eng)":92.77,"Tatoeba (fry-eng)":25.34,"Tatoeba (gla-eng)":2.99,"Tatoeba (gle-eng)":3.96,"Tatoeba (glg-eng)":75.92,"Tatoeba (gsw-eng)":21.86,"Tatoeba (heb-eng)":0.69,"Tatoeba (hin-eng)":91.53,"Tatoeba (hrv-eng)":9.73,"Tatoeba (hsb-eng)":9.78,"Tatoeba (hun-eng)":4.3,"Tatoeba (hye-eng)":0.65,"Tatoeba (ido-eng)":39.15,"Tatoeba (ile-eng)":56.08,"Tatoeba (ina-eng)":74.59,"Tatoeba (ind-eng)":88.3,"Tatoeba (isl-eng)":5.46,"Tatoeba (ita-eng)":55.97,"Tatoeba (jav-eng)":18.6,"Tatoeba (jpn-eng)":65.97,"Tatoeba (kab-eng)":1.14,"Tatoeba (kat-eng)":0.91,"Tatoeba (kaz-eng)":4.7,"Tatoeba (khm-eng)":0.01,"Tatoeba (kor-eng)":12.56,"Tatoeba (kur-eng)":10.65,"Tatoeba (kzj-eng)":4.78,"Tatoeba (lat-eng)":27.65,"Tatoeba (lfn-eng)":41.94,"Tatoeba (lit-eng)":4.83,"Tatoeba (lvs-eng)":5.53,"Tatoeba (mal-eng)":85.56,"Tatoeba (mar-eng)":52.67,"Tatoeba (max-eng)":39.38,"Tatoeba (mhr-eng)":2.38,"Tatoeba (mkd-eng)":6.19,"Tatoeba (mon-eng)":4.79,"Tatoeba (nds-eng)":17.76,"Tatoeba (nld-eng)":25.15,"Tatoeba (nno-eng)":13.33,"Tatoeba (nob-eng)":17.67,"Tatoeba (nov-eng)":49.14,"Tatoeba (oci-eng)":39.02,"Tatoeba (orv-eng)":3.07,"Tatoeba (pam-eng)":5.68,"Tatoeba (pes-eng)":13.51,"Tatoeba (pms-eng)":27.6,"Tatoeba (pol-eng)":9.61,"Tatoeba (por-eng)":92.52,"Tatoeba (ron-eng)":23.2,"Tatoeba (rus-eng)":40.27,"Tatoeba (slk-eng)":8.52,"Tatoeba (slv-eng)":8.1,"Tatoeba (spa-eng)":96.05,"Tatoeba (sqi-eng)":12.07,"Tatoeba (srp-eng)":8.76,"Tatoeba (swe-eng)":14.94,"Tatoeba (swg-eng)":21.63,"Tatoeba (swh-eng)":16.31,"Tatoeba (tam-eng)":77.3,"Tatoeba (tat-eng)":2.86,"Tatoeba (tel-eng)":69.05,"Tatoeba (tgl-eng)":8.63,"Tatoeba (tha-eng)":1.64,"Tatoeba (tuk-eng)":5.14,"Tatoeba (tur-eng)":5.09,"Tatoeba (tzl-eng)":31.31,"Tatoeba (uig-eng)":1.68,"Tatoeba (ukr-eng)":14.32,"Tatoeba (urd-eng)":68.96,"Tatoeba (uzb-eng)":5.15,"Tatoeba (vie-eng)":94.57,"Tatoeba (war-eng)":8.07,"Tatoeba (wuu-eng)":81.18,"Tatoeba (xho-eng)":8.3,"Tatoeba (yid-eng)":0.12,"Tatoeba (yue-eng)":81.35,"Tatoeba (zsm-eng)":81.48} -{"index":16,"Rank":16,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.03,"BUCC (de-en)":11.3,"BUCC (fr-en)":96.18,"BUCC (ru-en)":1.59,"BUCC (zh-en)":96.16,"Tatoeba (afr-eng)":9.01,"Tatoeba (amh-eng)":0.21,"Tatoeba (ang-eng)":13.31,"Tatoeba (ara-eng)":80.09,"Tatoeba (arq-eng)":16.16,"Tatoeba (arz-eng)":49.42,"Tatoeba (ast-eng)":59.96,"Tatoeba (awa-eng)":30.54,"Tatoeba (aze-eng)":3.44,"Tatoeba (bel-eng)":3.28,"Tatoeba (ben-eng)":70.14,"Tatoeba (ber-eng)":3.92,"Tatoeba (bos-eng)":9.17,"Tatoeba (bre-eng)":3.85,"Tatoeba (bul-eng)":4.58,"Tatoeba (cat-eng)":81.55,"Tatoeba (cbk-eng)":55.71,"Tatoeba (ceb-eng)":5.64,"Tatoeba (ces-eng)":3.76,"Tatoeba (cha-eng)":16.46,"Tatoeba (cmn-eng)":92.95,"Tatoeba (cor-eng)":3.13,"Tatoeba (csb-eng)":5.26,"Tatoeba (cym-eng)":5.87,"Tatoeba (dan-eng)":11.4,"Tatoeba (deu-eng)":29.34,"Tatoeba (dsb-eng)":5.83,"Tatoeba (dtp-eng)":3.37,"Tatoeba (ell-eng)":0.89,"Tatoeba (epo-eng)":13.42,"Tatoeba (est-eng)":3.08,"Tatoeba (eus-eng)":42.35,"Tatoeba (fao-eng)":8.3,"Tatoeba (fin-eng)":3.5,"Tatoeba (fra-eng)":91.65,"Tatoeba (fry-eng)":18.1,"Tatoeba (gla-eng)":2.73,"Tatoeba (gle-eng)":3.46,"Tatoeba (glg-eng)":69.26,"Tatoeba (gsw-eng)":17.5,"Tatoeba (heb-eng)":0.25,"Tatoeba (hin-eng)":85.35,"Tatoeba (hrv-eng)":6.27,"Tatoeba (hsb-eng)":6.8,"Tatoeba (hun-eng)":3.25,"Tatoeba (hye-eng)":0.23,"Tatoeba (ido-eng)":26.21,"Tatoeba (ile-eng)":44.24,"Tatoeba (ina-eng)":63.9,"Tatoeba (ind-eng)":85.46,"Tatoeba (isl-eng)":3.27,"Tatoeba 
(ita-eng)":37.29,"Tatoeba (jav-eng)":14.35,"Tatoeba (jpn-eng)":48.83,"Tatoeba (kab-eng)":0.89,"Tatoeba (kat-eng)":0.4,"Tatoeba (kaz-eng)":0.74,"Tatoeba (khm-eng)":0.15,"Tatoeba (kor-eng)":2.65,"Tatoeba (kur-eng)":6.48,"Tatoeba (kzj-eng)":3.39,"Tatoeba (lat-eng)":19.55,"Tatoeba (lfn-eng)":37.52,"Tatoeba (lit-eng)":3.17,"Tatoeba (lvs-eng)":3.53,"Tatoeba (mal-eng)":73.36,"Tatoeba (mar-eng)":47.23,"Tatoeba (max-eng)":33.46,"Tatoeba (mhr-eng)":0.44,"Tatoeba (mkd-eng)":2.77,"Tatoeba (mon-eng)":0.85,"Tatoeba (nds-eng)":11.83,"Tatoeba (nld-eng)":16.33,"Tatoeba (nno-eng)":8.05,"Tatoeba (nob-eng)":11.91,"Tatoeba (nov-eng)":38.78,"Tatoeba (oci-eng)":29.39,"Tatoeba (orv-eng)":0.54,"Tatoeba (pam-eng)":3.78,"Tatoeba (pes-eng)":5.87,"Tatoeba (pms-eng)":17.49,"Tatoeba (pol-eng)":5.63,"Tatoeba (por-eng)":92.08,"Tatoeba (ron-eng)":14.95,"Tatoeba (rus-eng)":25.22,"Tatoeba (slk-eng)":5.49,"Tatoeba (slv-eng)":5.41,"Tatoeba (spa-eng)":95.48,"Tatoeba (sqi-eng)":8.49,"Tatoeba (srp-eng)":4.55,"Tatoeba (swe-eng)":10.72,"Tatoeba (swg-eng)":16.44,"Tatoeba (swh-eng)":14.52,"Tatoeba (tam-eng)":62.26,"Tatoeba (tat-eng)":0.97,"Tatoeba (tel-eng)":35.84,"Tatoeba (tgl-eng)":6.52,"Tatoeba (tha-eng)":0.51,"Tatoeba (tuk-eng)":2.81,"Tatoeba (tur-eng)":3.49,"Tatoeba (tzl-eng)":14.52,"Tatoeba (uig-eng)":0.53,"Tatoeba (ukr-eng)":4.93,"Tatoeba (urd-eng)":61.62,"Tatoeba (uzb-eng)":2.54,"Tatoeba (vie-eng)":92.86,"Tatoeba (war-eng)":7.19,"Tatoeba (wuu-eng)":74.62,"Tatoeba (xho-eng)":4.83,"Tatoeba (yid-eng)":0.19,"Tatoeba (yue-eng)":74.35,"Tatoeba (zsm-eng)":74.89} -{"index":26,"Rank":17,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":21.98,"BUCC (de-en)":95.04,"BUCC (fr-en)":94.96,"BUCC (ru-en)":8.33,"BUCC (zh-en)":1.3,"Tatoeba (afr-eng)":41.84,"Tatoeba (amh-eng)":0.03,"Tatoeba (ang-eng)":37.87,"Tatoeba (ara-eng)":0.61,"Tatoeba (arq-eng)":0.74,"Tatoeba (arz-eng)":0.42,"Tatoeba (ast-eng)":65.41,"Tatoeba (awa-eng)":1.46,"Tatoeba (aze-eng)":8.79,"Tatoeba (bel-eng)":5.76,"Tatoeba (ben-eng)":0.01,"Tatoeba (ber-eng)":5.92,"Tatoeba (bos-eng)":16.12,"Tatoeba (bre-eng)":6.12,"Tatoeba (bul-eng)":9.06,"Tatoeba (cat-eng)":57.4,"Tatoeba (cbk-eng)":57.68,"Tatoeba (ceb-eng)":12.56,"Tatoeba (ces-eng)":9.47,"Tatoeba (cha-eng)":27.13,"Tatoeba (cmn-eng)":1.82,"Tatoeba (cor-eng)":3.87,"Tatoeba (csb-eng)":14.41,"Tatoeba (cym-eng)":6.69,"Tatoeba (dan-eng)":54.87,"Tatoeba (deu-eng)":93.72,"Tatoeba (dsb-eng)":14.74,"Tatoeba (dtp-eng)":5.84,"Tatoeba (ell-eng)":0.6,"Tatoeba (epo-eng)":30.8,"Tatoeba (est-eng)":5.39,"Tatoeba (eus-eng)":11.9,"Tatoeba (fao-eng)":28.08,"Tatoeba (fin-eng)":6.81,"Tatoeba (fra-eng)":85.29,"Tatoeba (fry-eng)":38.68,"Tatoeba (gla-eng)":2.96,"Tatoeba (gle-eng)":3.74,"Tatoeba (glg-eng)":70.0,"Tatoeba (gsw-eng)":30.49,"Tatoeba (heb-eng)":0.87,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":17.43,"Tatoeba (hsb-eng)":14.69,"Tatoeba (hun-eng)":7.28,"Tatoeba (hye-eng)":0.77,"Tatoeba (ido-eng)":46.65,"Tatoeba (ile-eng)":59.43,"Tatoeba (ina-eng)":82.71,"Tatoeba (ind-eng)":37.26,"Tatoeba (isl-eng)":11.21,"Tatoeba (ita-eng)":79.77,"Tatoeba (jav-eng)":7.81,"Tatoeba (jpn-eng)":0.91,"Tatoeba (kab-eng)":2.23,"Tatoeba (kat-eng)":1.48,"Tatoeba (kaz-eng)":1.77,"Tatoeba (khm-eng)":0.38,"Tatoeba (kor-eng)":1.96,"Tatoeba (kur-eng)":12.11,"Tatoeba (kzj-eng)":6.13,"Tatoeba (lat-eng)":27.84,"Tatoeba (lfn-eng)":45.89,"Tatoeba (lit-eng)":5.94,"Tatoeba (lvs-eng)":8.11,"Tatoeba (mal-eng)":0.59,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":21.7,"Tatoeba (mhr-eng)":0.68,"Tatoeba (mkd-eng)":5.92,"Tatoeba 
(mon-eng)":2.39,"Tatoeba (nds-eng)":45.04,"Tatoeba (nld-eng)":64.75,"Tatoeba (nno-eng)":36.74,"Tatoeba (nob-eng)":54.77,"Tatoeba (nov-eng)":57.12,"Tatoeba (oci-eng)":34.39,"Tatoeba (orv-eng)":2.04,"Tatoeba (pam-eng)":8.34,"Tatoeba (pes-eng)":0.87,"Tatoeba (pms-eng)":38.06,"Tatoeba (pol-eng)":28.35,"Tatoeba (por-eng)":83.61,"Tatoeba (ron-eng)":65.27,"Tatoeba (rus-eng)":30.42,"Tatoeba (slk-eng)":13.19,"Tatoeba (slv-eng)":13.49,"Tatoeba (spa-eng)":89.18,"Tatoeba (sqi-eng)":14.66,"Tatoeba (srp-eng)":13.24,"Tatoeba (swe-eng)":60.67,"Tatoeba (swg-eng)":34.76,"Tatoeba (swh-eng)":8.07,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.46,"Tatoeba (tel-eng)":0.67,"Tatoeba (tgl-eng)":25.22,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.99,"Tatoeba (tur-eng)":7.72,"Tatoeba (tzl-eng)":38.49,"Tatoeba (uig-eng)":0.87,"Tatoeba (ukr-eng)":9.12,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":5.48,"Tatoeba (vie-eng)":8.45,"Tatoeba (war-eng)":13.75,"Tatoeba (wuu-eng)":1.44,"Tatoeba (xho-eng)":9.15,"Tatoeba (yid-eng)":0.28,"Tatoeba (yue-eng)":0.98,"Tatoeba (zsm-eng)":35.71} -{"index":22,"Rank":18,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":19.48,"BUCC (de-en)":90.99,"BUCC (fr-en)":88.55,"BUCC (ru-en)":2.07,"BUCC (zh-en)":1.49,"Tatoeba (afr-eng)":33.47,"Tatoeba (amh-eng)":0.01,"Tatoeba (ang-eng)":30.74,"Tatoeba (ara-eng)":0.47,"Tatoeba (arq-eng)":0.34,"Tatoeba (arz-eng)":0.14,"Tatoeba (ast-eng)":51.74,"Tatoeba (awa-eng)":0.49,"Tatoeba (aze-eng)":7.43,"Tatoeba (bel-eng)":3.45,"Tatoeba (ben-eng)":0.06,"Tatoeba (ber-eng)":5.79,"Tatoeba (bos-eng)":17.43,"Tatoeba (bre-eng)":5.69,"Tatoeba (bul-eng)":7.55,"Tatoeba (cat-eng)":48.06,"Tatoeba (cbk-eng)":54.56,"Tatoeba (ceb-eng)":8.72,"Tatoeba (ces-eng)":8.76,"Tatoeba (cha-eng)":27.56,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":3.69,"Tatoeba (csb-eng)":13.18,"Tatoeba (cym-eng)":6.97,"Tatoeba (dan-eng)":47.36,"Tatoeba (deu-eng)":91.54,"Tatoeba (dsb-eng)":13.2,"Tatoeba (dtp-eng)":4.54,"Tatoeba (ell-eng)":0.55,"Tatoeba (epo-eng)":27.86,"Tatoeba (est-eng)":5.13,"Tatoeba (eus-eng)":10.23,"Tatoeba (fao-eng)":21.44,"Tatoeba (fin-eng)":6.62,"Tatoeba (fra-eng)":79.66,"Tatoeba (fry-eng)":32.92,"Tatoeba (gla-eng)":2.87,"Tatoeba (gle-eng)":3.26,"Tatoeba (glg-eng)":63.81,"Tatoeba (gsw-eng)":29.71,"Tatoeba (heb-eng)":0.33,"Tatoeba (hin-eng)":0.25,"Tatoeba (hrv-eng)":17.16,"Tatoeba (hsb-eng)":12.02,"Tatoeba (hun-eng)":7.21,"Tatoeba (hye-eng)":0.78,"Tatoeba (ido-eng)":40.83,"Tatoeba (ile-eng)":54.95,"Tatoeba (ina-eng)":72.28,"Tatoeba (ind-eng)":30.95,"Tatoeba (isl-eng)":11.29,"Tatoeba (ita-eng)":73.83,"Tatoeba (jav-eng)":8.66,"Tatoeba (jpn-eng)":0.61,"Tatoeba (kab-eng)":1.78,"Tatoeba (kat-eng)":0.79,"Tatoeba (kaz-eng)":0.95,"Tatoeba (khm-eng)":0.49,"Tatoeba (kor-eng)":1.87,"Tatoeba (kur-eng)":10.91,"Tatoeba (kzj-eng)":5.72,"Tatoeba (lat-eng)":18.24,"Tatoeba (lfn-eng)":43.49,"Tatoeba (lit-eng)":7.13,"Tatoeba (lvs-eng)":7.04,"Tatoeba (mal-eng)":0.44,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":18.99,"Tatoeba (mhr-eng)":1.11,"Tatoeba (mkd-eng)":2.49,"Tatoeba (mon-eng)":2.01,"Tatoeba (nds-eng)":39.96,"Tatoeba (nld-eng)":58.86,"Tatoeba (nno-eng)":29.07,"Tatoeba (nob-eng)":40.25,"Tatoeba (nov-eng)":50.19,"Tatoeba (oci-eng)":30.72,"Tatoeba (orv-eng)":0.85,"Tatoeba (pam-eng)":7.21,"Tatoeba (pes-eng)":0.53,"Tatoeba (pms-eng)":31.07,"Tatoeba (pol-eng)":18.06,"Tatoeba (por-eng)":81.92,"Tatoeba (ron-eng)":62.6,"Tatoeba (rus-eng)":22.24,"Tatoeba (slk-eng)":10.59,"Tatoeba (slv-eng)":11.4,"Tatoeba (spa-eng)":85.78,"Tatoeba (sqi-eng)":14.92,"Tatoeba 
(srp-eng)":9.87,"Tatoeba (swe-eng)":55.08,"Tatoeba (swg-eng)":32.66,"Tatoeba (swh-eng)":7.64,"Tatoeba (tam-eng)":0.49,"Tatoeba (tat-eng)":1.28,"Tatoeba (tel-eng)":0.45,"Tatoeba (tgl-eng)":23.63,"Tatoeba (tha-eng)":0.61,"Tatoeba (tuk-eng)":5.71,"Tatoeba (tur-eng)":8.25,"Tatoeba (tzl-eng)":28.4,"Tatoeba (uig-eng)":0.57,"Tatoeba (ukr-eng)":5.69,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.19,"Tatoeba (vie-eng)":9.07,"Tatoeba (war-eng)":12.31,"Tatoeba (wuu-eng)":1.38,"Tatoeba (xho-eng)":7.6,"Tatoeba (yid-eng)":0.41,"Tatoeba (yue-eng)":1.31,"Tatoeba (zsm-eng)":29.74} -{"index":1,"Rank":19,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.78,"BUCC (de-en)":9.26,"BUCC (fr-en)":17.41,"BUCC (ru-en)":51.97,"BUCC (zh-en)":88.7,"Tatoeba (afr-eng)":13.22,"Tatoeba (amh-eng)":5.75,"Tatoeba (ang-eng)":13.26,"Tatoeba (ara-eng)":19.56,"Tatoeba (arq-eng)":3.99,"Tatoeba (arz-eng)":11.42,"Tatoeba (ast-eng)":17.74,"Tatoeba (awa-eng)":16.75,"Tatoeba (aze-eng)":8.55,"Tatoeba (bel-eng)":24.34,"Tatoeba (ben-eng)":23.45,"Tatoeba (ber-eng)":2.49,"Tatoeba (bos-eng)":15.81,"Tatoeba (bre-eng)":3.22,"Tatoeba (bul-eng)":42.18,"Tatoeba (cat-eng)":15.96,"Tatoeba (cbk-eng)":10.95,"Tatoeba (ceb-eng)":4.8,"Tatoeba (ces-eng)":11.89,"Tatoeba (cha-eng)":9.11,"Tatoeba (cmn-eng)":86.26,"Tatoeba (cor-eng)":1.94,"Tatoeba (csb-eng)":7.17,"Tatoeba (cym-eng)":5.64,"Tatoeba (dan-eng)":26.0,"Tatoeba (deu-eng)":20.2,"Tatoeba (dsb-eng)":4.49,"Tatoeba (dtp-eng)":1.76,"Tatoeba (ell-eng)":9.5,"Tatoeba (epo-eng)":8.25,"Tatoeba (est-eng)":5.45,"Tatoeba (eus-eng)":7.38,"Tatoeba (fao-eng)":6.34,"Tatoeba (fin-eng)":13.53,"Tatoeba (fra-eng)":19.96,"Tatoeba (fry-eng)":19.03,"Tatoeba (gla-eng)":2.68,"Tatoeba (gle-eng)":3.65,"Tatoeba (glg-eng)":19.32,"Tatoeba (gsw-eng)":11.69,"Tatoeba (heb-eng)":30.26,"Tatoeba (hin-eng)":49.45,"Tatoeba (hrv-eng)":10.05,"Tatoeba (hsb-eng)":6.44,"Tatoeba (hun-eng)":14.77,"Tatoeba (hye-eng)":23.13,"Tatoeba (ido-eng)":11.78,"Tatoeba (ile-eng)":17.24,"Tatoeba (ina-eng)":23.22,"Tatoeba (ind-eng)":32.13,"Tatoeba (isl-eng)":7.49,"Tatoeba (ita-eng)":15.98,"Tatoeba (jav-eng)":6.74,"Tatoeba (jpn-eng)":52.34,"Tatoeba (kab-eng)":0.6,"Tatoeba (kat-eng)":33.0,"Tatoeba (kaz-eng)":16.97,"Tatoeba (khm-eng)":15.33,"Tatoeba (kor-eng)":48.19,"Tatoeba (kur-eng)":6.11,"Tatoeba (kzj-eng)":2.34,"Tatoeba (lat-eng)":7.87,"Tatoeba (lfn-eng)":8.76,"Tatoeba (lit-eng)":8.13,"Tatoeba (lvs-eng)":7.4,"Tatoeba (mal-eng)":36.61,"Tatoeba (mar-eng)":28.56,"Tatoeba (max-eng)":13.73,"Tatoeba (mhr-eng)":3.52,"Tatoeba (mkd-eng)":16.69,"Tatoeba (mon-eng)":24.85,"Tatoeba (nds-eng)":14.99,"Tatoeba (nld-eng)":29.41,"Tatoeba (nno-eng)":13.8,"Tatoeba (nob-eng)":27.89,"Tatoeba (nov-eng)":25.22,"Tatoeba (oci-eng)":6.69,"Tatoeba (orv-eng)":3.11,"Tatoeba (pam-eng)":3.09,"Tatoeba (pes-eng)":51.06,"Tatoeba (pms-eng)":9.27,"Tatoeba (pol-eng)":14.51,"Tatoeba (por-eng)":28.6,"Tatoeba (ron-eng)":17.36,"Tatoeba (rus-eng)":60.68,"Tatoeba (slk-eng)":14.05,"Tatoeba (slv-eng)":10.17,"Tatoeba (spa-eng)":28.19,"Tatoeba (sqi-eng)":15.83,"Tatoeba (srp-eng)":12.92,"Tatoeba (swe-eng)":21.75,"Tatoeba (swg-eng)":10.95,"Tatoeba (swh-eng)":6.37,"Tatoeba (tam-eng)":26.72,"Tatoeba (tat-eng)":2.51,"Tatoeba (tel-eng)":23.69,"Tatoeba (tgl-eng)":5.76,"Tatoeba (tha-eng)":60.21,"Tatoeba (tuk-eng)":4.02,"Tatoeba (tur-eng)":9.38,"Tatoeba (tzl-eng)":20.61,"Tatoeba (uig-eng)":2.65,"Tatoeba (ukr-eng)":39.8,"Tatoeba (urd-eng)":24.79,"Tatoeba (uzb-eng)":3.53,"Tatoeba (vie-eng)":49.56,"Tatoeba (war-eng)":4.84,"Tatoeba (wuu-eng)":73.25,"Tatoeba 
(xho-eng)":8.03,"Tatoeba (yid-eng)":2.26,"Tatoeba (yue-eng)":58.35,"Tatoeba (zsm-eng)":30.9} -{"index":25,"Rank":20,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":17.26,"BUCC (de-en)":87.0,"BUCC (fr-en)":88.91,"BUCC (ru-en)":0.44,"BUCC (zh-en)":0.95,"Tatoeba (afr-eng)":23.7,"Tatoeba (amh-eng)":0.65,"Tatoeba (ang-eng)":30.98,"Tatoeba (ara-eng)":0.48,"Tatoeba (arq-eng)":0.68,"Tatoeba (arz-eng)":0.22,"Tatoeba (ast-eng)":55.3,"Tatoeba (awa-eng)":1.03,"Tatoeba (aze-eng)":5.83,"Tatoeba (bel-eng)":1.66,"Tatoeba (ben-eng)":0.0,"Tatoeba (ber-eng)":5.62,"Tatoeba (bos-eng)":12.23,"Tatoeba (bre-eng)":5.84,"Tatoeba (bul-eng)":1.35,"Tatoeba (cat-eng)":48.56,"Tatoeba (cbk-eng)":46.97,"Tatoeba (ceb-eng)":9.79,"Tatoeba (ces-eng)":6.0,"Tatoeba (cha-eng)":24.21,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":4.03,"Tatoeba (csb-eng)":9.53,"Tatoeba (cym-eng)":9.17,"Tatoeba (dan-eng)":34.63,"Tatoeba (deu-eng)":89.31,"Tatoeba (dsb-eng)":9.68,"Tatoeba (dtp-eng)":4.66,"Tatoeba (ell-eng)":0.77,"Tatoeba (epo-eng)":26.88,"Tatoeba (est-eng)":5.19,"Tatoeba (eus-eng)":9.46,"Tatoeba (fao-eng)":21.59,"Tatoeba (fin-eng)":5.66,"Tatoeba (fra-eng)":79.71,"Tatoeba (fry-eng)":28.29,"Tatoeba (gla-eng)":2.34,"Tatoeba (gle-eng)":3.55,"Tatoeba (glg-eng)":56.25,"Tatoeba (gsw-eng)":24.25,"Tatoeba (heb-eng)":0.57,"Tatoeba (hin-eng)":0.12,"Tatoeba (hrv-eng)":10.29,"Tatoeba (hsb-eng)":9.52,"Tatoeba (hun-eng)":6.22,"Tatoeba (hye-eng)":0.81,"Tatoeba (ido-eng)":41.11,"Tatoeba (ile-eng)":54.0,"Tatoeba (ina-eng)":75.47,"Tatoeba (ind-eng)":13.02,"Tatoeba (isl-eng)":8.98,"Tatoeba (ita-eng)":67.23,"Tatoeba (jav-eng)":8.54,"Tatoeba (jpn-eng)":0.99,"Tatoeba (kab-eng)":1.85,"Tatoeba (kat-eng)":1.37,"Tatoeba (kaz-eng)":0.67,"Tatoeba (khm-eng)":0.56,"Tatoeba (kor-eng)":1.73,"Tatoeba (kur-eng)":9.23,"Tatoeba (kzj-eng)":5.38,"Tatoeba (lat-eng)":21.3,"Tatoeba (lfn-eng)":40.48,"Tatoeba (lit-eng)":5.38,"Tatoeba (lvs-eng)":6.83,"Tatoeba (mal-eng)":0.45,"Tatoeba (mar-eng)":0.01,"Tatoeba (max-eng)":16.44,"Tatoeba (mhr-eng)":0.33,"Tatoeba (mkd-eng)":0.4,"Tatoeba (mon-eng)":2.48,"Tatoeba (nds-eng)":34.66,"Tatoeba (nld-eng)":42.72,"Tatoeba (nno-eng)":24.08,"Tatoeba (nob-eng)":34.17,"Tatoeba (nov-eng)":55.01,"Tatoeba (oci-eng)":29.15,"Tatoeba (orv-eng)":0.2,"Tatoeba (pam-eng)":6.99,"Tatoeba (pes-eng)":0.9,"Tatoeba (pms-eng)":30.8,"Tatoeba (pol-eng)":12.81,"Tatoeba (por-eng)":73.45,"Tatoeba (ron-eng)":54.86,"Tatoeba (rus-eng)":2.43,"Tatoeba (slk-eng)":8.35,"Tatoeba (slv-eng)":9.3,"Tatoeba (spa-eng)":78.87,"Tatoeba (sqi-eng)":11.74,"Tatoeba (srp-eng)":5.83,"Tatoeba (swe-eng)":35.41,"Tatoeba (swg-eng)":28.18,"Tatoeba (swh-eng)":7.53,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.01,"Tatoeba (tel-eng)":1.1,"Tatoeba (tgl-eng)":12.4,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.95,"Tatoeba (tur-eng)":6.45,"Tatoeba (tzl-eng)":37.82,"Tatoeba (uig-eng)":0.67,"Tatoeba (ukr-eng)":1.88,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.79,"Tatoeba (vie-eng)":7.03,"Tatoeba (war-eng)":9.68,"Tatoeba (wuu-eng)":1.28,"Tatoeba (xho-eng)":10.64,"Tatoeba (yid-eng)":0.57,"Tatoeba (yue-eng)":0.88,"Tatoeba (zsm-eng)":14.67} -{"index":6,"Rank":21,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":6.61,"BUCC (de-en)":14.99,"BUCC (fr-en)":32.42,"BUCC (ru-en)":0.18,"BUCC (zh-en)":2.76,"Tatoeba (afr-eng)":8.05,"Tatoeba (amh-eng)":0.67,"Tatoeba (ang-eng)":17.0,"Tatoeba (ara-eng)":0.3,"Tatoeba (arq-eng)":0.26,"Tatoeba (arz-eng)":0.0,"Tatoeba (ast-eng)":17.93,"Tatoeba 
(awa-eng)":0.29,"Tatoeba (aze-eng)":3.26,"Tatoeba (bel-eng)":0.96,"Tatoeba (ben-eng)":0.07,"Tatoeba (ber-eng)":5.21,"Tatoeba (bos-eng)":7.18,"Tatoeba (bre-eng)":3.81,"Tatoeba (bul-eng)":0.99,"Tatoeba (cat-eng)":15.52,"Tatoeba (cbk-eng)":14.09,"Tatoeba (ceb-eng)":4.59,"Tatoeba (ces-eng)":4.73,"Tatoeba (cha-eng)":14.16,"Tatoeba (cmn-eng)":2.89,"Tatoeba (cor-eng)":3.05,"Tatoeba (csb-eng)":6.64,"Tatoeba (cym-eng)":6.45,"Tatoeba (dan-eng)":10.06,"Tatoeba (deu-eng)":18.14,"Tatoeba (dsb-eng)":3.81,"Tatoeba (dtp-eng)":2.73,"Tatoeba (ell-eng)":0.5,"Tatoeba (epo-eng)":10.98,"Tatoeba (est-eng)":3.5,"Tatoeba (eus-eng)":7.35,"Tatoeba (fao-eng)":8.13,"Tatoeba (fin-eng)":3.62,"Tatoeba (fra-eng)":21.53,"Tatoeba (fry-eng)":14.62,"Tatoeba (gla-eng)":2.82,"Tatoeba (gle-eng)":2.38,"Tatoeba (glg-eng)":18.49,"Tatoeba (gsw-eng)":13.55,"Tatoeba (heb-eng)":0.1,"Tatoeba (hin-eng)":0.0,"Tatoeba (hrv-eng)":5.52,"Tatoeba (hsb-eng)":4.08,"Tatoeba (hun-eng)":4.68,"Tatoeba (hye-eng)":0.4,"Tatoeba (ido-eng)":17.46,"Tatoeba (ile-eng)":20.98,"Tatoeba (ina-eng)":31.39,"Tatoeba (ind-eng)":8.37,"Tatoeba (isl-eng)":4.15,"Tatoeba (ita-eng)":18.5,"Tatoeba (jav-eng)":5.54,"Tatoeba (jpn-eng)":1.58,"Tatoeba (kab-eng)":1.02,"Tatoeba (kat-eng)":0.28,"Tatoeba (kaz-eng)":0.58,"Tatoeba (khm-eng)":0.51,"Tatoeba (kor-eng)":0.5,"Tatoeba (kur-eng)":6.31,"Tatoeba (kzj-eng)":3.6,"Tatoeba (lat-eng)":11.38,"Tatoeba (lfn-eng)":12.55,"Tatoeba (lit-eng)":2.48,"Tatoeba (lvs-eng)":4.88,"Tatoeba (mal-eng)":0.4,"Tatoeba (mar-eng)":0.0,"Tatoeba (max-eng)":7.27,"Tatoeba (mhr-eng)":0.1,"Tatoeba (mkd-eng)":0.15,"Tatoeba (mon-eng)":1.44,"Tatoeba (nds-eng)":13.34,"Tatoeba (nld-eng)":12.37,"Tatoeba (nno-eng)":7.02,"Tatoeba (nob-eng)":10.3,"Tatoeba (nov-eng)":30.33,"Tatoeba (oci-eng)":11.98,"Tatoeba (orv-eng)":0.0,"Tatoeba (pam-eng)":4.85,"Tatoeba (pes-eng)":0.2,"Tatoeba (pms-eng)":11.4,"Tatoeba (pol-eng)":6.57,"Tatoeba (por-eng)":18.86,"Tatoeba (ron-eng)":10.94,"Tatoeba (rus-eng)":0.25,"Tatoeba (slk-eng)":5.63,"Tatoeba (slv-eng)":4.38,"Tatoeba (spa-eng)":18.97,"Tatoeba (sqi-eng)":6.3,"Tatoeba (srp-eng)":2.98,"Tatoeba (swe-eng)":8.47,"Tatoeba (swg-eng)":9.33,"Tatoeba (swh-eng)":6.9,"Tatoeba (tam-eng)":0.51,"Tatoeba (tat-eng)":0.81,"Tatoeba (tel-eng)":0.51,"Tatoeba (tgl-eng)":3.22,"Tatoeba (tha-eng)":1.0,"Tatoeba (tuk-eng)":3.48,"Tatoeba (tur-eng)":4.22,"Tatoeba (tzl-eng)":14.76,"Tatoeba (uig-eng)":0.21,"Tatoeba (ukr-eng)":0.78,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":3.19,"Tatoeba (vie-eng)":5.28,"Tatoeba (war-eng)":5.48,"Tatoeba (wuu-eng)":2.62,"Tatoeba (xho-eng)":2.17,"Tatoeba (yid-eng)":0.3,"Tatoeba (yue-eng)":2.15,"Tatoeba (zsm-eng)":7.56} -{"index":21,"Rank":22,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":4.54,"BUCC (de-en)":0.18,"BUCC (fr-en)":0.08,"BUCC (ru-en)":0.15,"BUCC (zh-en)":0.05,"Tatoeba (afr-eng)":4.82,"Tatoeba (amh-eng)":1.18,"Tatoeba (ang-eng)":8.54,"Tatoeba (ara-eng)":0.63,"Tatoeba (arq-eng)":0.4,"Tatoeba (arz-eng)":0.63,"Tatoeba (ast-eng)":11.69,"Tatoeba (awa-eng)":0.0,"Tatoeba (aze-eng)":3.22,"Tatoeba (bel-eng)":1.75,"Tatoeba (ben-eng)":0.2,"Tatoeba (ber-eng)":7.0,"Tatoeba (bos-eng)":9.31,"Tatoeba (bre-eng)":4.17,"Tatoeba (bul-eng)":1.29,"Tatoeba (cat-eng)":7.73,"Tatoeba (cbk-eng)":5.61,"Tatoeba (ceb-eng)":4.88,"Tatoeba (ces-eng)":3.55,"Tatoeba (cha-eng)":19.29,"Tatoeba (cmn-eng)":0.5,"Tatoeba (cor-eng)":4.15,"Tatoeba (csb-eng)":5.69,"Tatoeba (cym-eng)":8.4,"Tatoeba (dan-eng)":6.99,"Tatoeba (deu-eng)":3.67,"Tatoeba (dsb-eng)":5.33,"Tatoeba (dtp-eng)":4.25,"Tatoeba 
(ell-eng)":0.63,"Tatoeba (epo-eng)":2.45,"Tatoeba (est-eng)":2.69,"Tatoeba (eus-eng)":4.69,"Tatoeba (fao-eng)":7.61,"Tatoeba (fin-eng)":3.36,"Tatoeba (fra-eng)":7.0,"Tatoeba (fry-eng)":12.36,"Tatoeba (gla-eng)":3.07,"Tatoeba (gle-eng)":4.81,"Tatoeba (glg-eng)":8.12,"Tatoeba (gsw-eng)":18.87,"Tatoeba (heb-eng)":0.68,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":5.41,"Tatoeba (hsb-eng)":6.32,"Tatoeba (hun-eng)":3.42,"Tatoeba (hye-eng)":0.97,"Tatoeba (ido-eng)":7.1,"Tatoeba (ile-eng)":13.61,"Tatoeba (ina-eng)":8.57,"Tatoeba (ind-eng)":7.26,"Tatoeba (isl-eng)":4.09,"Tatoeba (ita-eng)":5.54,"Tatoeba (jav-eng)":11.43,"Tatoeba (jpn-eng)":0.2,"Tatoeba (kab-eng)":2.71,"Tatoeba (kat-eng)":1.11,"Tatoeba (kaz-eng)":1.17,"Tatoeba (khm-eng)":0.55,"Tatoeba (kor-eng)":0.5,"Tatoeba (kur-eng)":8.55,"Tatoeba (kzj-eng)":4.61,"Tatoeba (lat-eng)":4.07,"Tatoeba (lfn-eng)":2.83,"Tatoeba (lit-eng)":0.95,"Tatoeba (lvs-eng)":3.25,"Tatoeba (mal-eng)":0.29,"Tatoeba (mar-eng)":0.2,"Tatoeba (max-eng)":14.53,"Tatoeba (mhr-eng)":0.2,"Tatoeba (mkd-eng)":0.2,"Tatoeba (mon-eng)":1.1,"Tatoeba (nds-eng)":10.37,"Tatoeba (nld-eng)":9.5,"Tatoeba (nno-eng)":4.49,"Tatoeba (nob-eng)":4.95,"Tatoeba (nov-eng)":14.53,"Tatoeba (oci-eng)":5.8,"Tatoeba (orv-eng)":0.24,"Tatoeba (pam-eng)":6.65,"Tatoeba (pes-eng)":0.5,"Tatoeba (pms-eng)":8.05,"Tatoeba (pol-eng)":5.13,"Tatoeba (por-eng)":5.87,"Tatoeba (ron-eng)":6.76,"Tatoeba (rus-eng)":0.2,"Tatoeba (slk-eng)":4.23,"Tatoeba (slv-eng)":6.05,"Tatoeba (spa-eng)":5.03,"Tatoeba (sqi-eng)":4.36,"Tatoeba (srp-eng)":1.77,"Tatoeba (swe-eng)":6.72,"Tatoeba (swg-eng)":8.54,"Tatoeba (swh-eng)":11.49,"Tatoeba (tam-eng)":1.3,"Tatoeba (tat-eng)":0.77,"Tatoeba (tel-eng)":0.85,"Tatoeba (tgl-eng)":2.61,"Tatoeba (tha-eng)":0.69,"Tatoeba (tuk-eng)":5.76,"Tatoeba (tur-eng)":5.24,"Tatoeba (tzl-eng)":15.51,"Tatoeba (uig-eng)":0.6,"Tatoeba (ukr-eng)":1.23,"Tatoeba (urd-eng)":0.4,"Tatoeba (uzb-eng)":4.73,"Tatoeba (vie-eng)":6.55,"Tatoeba (war-eng)":4.12,"Tatoeba (wuu-eng)":0.2,"Tatoeba (xho-eng)":4.33,"Tatoeba (yid-eng)":0.59,"Tatoeba (yue-eng)":0.5,"Tatoeba (zsm-eng)":7.27} -{"index":2,"Rank":23,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":"","BUCC (fr-en)":"","BUCC (ru-en)":"","BUCC (zh-en)":9.35,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba 
(kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"index":3,"Rank":24,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":1.09,"BUCC (fr-en)":0.02,"BUCC (ru-en)":0.0,"BUCC (zh-en)":0.0,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba 
(slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"index":4,"Rank":25,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":75.15,"BUCC (fr-en)":0.42,"BUCC (ru-en)":0.01,"BUCC (zh-en)":0.32,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"index":18,"Rank":26,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC 
(de-en)":98.87,"BUCC (fr-en)":"","BUCC (ru-en)":"","BUCC (zh-en)":"","Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"index":20,"Rank":27,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","BUCC (de-en)":0.18,"BUCC (fr-en)":0.19,"BUCC (ru-en)":0.1,"BUCC (zh-en)":0.0,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba 
(epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"index":27,"Rank":28,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":"","BUCC (fr-en)":"","BUCC (ru-en)":"","BUCC (zh-en)":98.39,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":95.15,"Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba 
(lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} +{"Rank":1,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":81.75,"BUCC (de-en)":99.35,"BUCC (fr-en)":98.72,"BUCC (ru-en)":97.78,"BUCC (zh-en)":99.16,"Tatoeba (afr-eng)":96.18,"Tatoeba (amh-eng)":91.47,"Tatoeba (ang-eng)":59.28,"Tatoeba (ara-eng)":88.8,"Tatoeba (arq-eng)":42.69,"Tatoeba (arz-eng)":76.0,"Tatoeba (ast-eng)":90.68,"Tatoeba (awa-eng)":71.7,"Tatoeba (aze-eng)":94.93,"Tatoeba (bel-eng)":95.0,"Tatoeba (ben-eng)":88.55,"Tatoeba (ber-eng)":8.4,"Tatoeba (bos-eng)":94.92,"Tatoeba (bre-eng)":15.07,"Tatoeba (bul-eng)":94.58,"Tatoeba (cat-eng)":95.38,"Tatoeba (cbk-eng)":79.44,"Tatoeba (ceb-eng)":64.42,"Tatoeba (ces-eng)":96.68,"Tatoeba (cha-eng)":31.77,"Tatoeba (cmn-eng)":95.1,"Tatoeba (cor-eng)":10.11,"Tatoeba (csb-eng)":52.57,"Tatoeba (cym-eng)":92.0,"Tatoeba (dan-eng)":95.71,"Tatoeba (deu-eng)":99.2,"Tatoeba (dsb-eng)":64.81,"Tatoeba (dtp-eng)":10.85,"Tatoeba (ell-eng)":95.35,"Tatoeba (epo-eng)":98.2,"Tatoeba (est-eng)":96.55,"Tatoeba (eus-eng)":95.01,"Tatoeba (fao-eng)":87.4,"Tatoeba (fin-eng)":96.37,"Tatoeba (fra-eng)":94.86,"Tatoeba (fry-eng)":89.31,"Tatoeba (gla-eng)":85.66,"Tatoeba (gle-eng)":93.8,"Tatoeba (glg-eng)":96.82,"Tatoeba (gsw-eng)":46.5,"Tatoeba (heb-eng)":91.53,"Tatoeba (hin-eng)":96.87,"Tatoeba (hrv-eng)":96.95,"Tatoeba (hsb-eng)":67.11,"Tatoeba (hun-eng)":96.55,"Tatoeba (hye-eng)":94.09,"Tatoeba (ido-eng)":89.42,"Tatoeba (ile-eng)":85.58,"Tatoeba (ina-eng)":95.37,"Tatoeba (ind-eng)":93.66,"Tatoeba (isl-eng)":94.75,"Tatoeba (ita-eng)":92.72,"Tatoeba (jav-eng)":79.77,"Tatoeba (jpn-eng)":95.38,"Tatoeba (kab-eng)":4.31,"Tatoeba (kat-eng)":95.02,"Tatoeba (kaz-eng)":87.49,"Tatoeba (khm-eng)":78.37,"Tatoeba (kor-eng)":90.95,"Tatoeba (kur-eng)":83.59,"Tatoeba (kzj-eng)":11.33,"Tatoeba (lat-eng)":80.07,"Tatoeba (lfn-eng)":67.54,"Tatoeba (lit-eng)":96.47,"Tatoeba (lvs-eng)":95.88,"Tatoeba (mal-eng)":98.45,"Tatoeba (mar-eng)":92.65,"Tatoeba (max-eng)":63.26,"Tatoeba (mhr-eng)":15.74,"Tatoeba (mkd-eng)":93.6,"Tatoeba (mon-eng)":95.91,"Tatoeba (nds-eng)":79.42,"Tatoeba (nld-eng)":96.07,"Tatoeba (nno-eng)":94.48,"Tatoeba (nob-eng)":98.4,"Tatoeba (nov-eng)":74.38,"Tatoeba (oci-eng)":65.81,"Tatoeba (orv-eng)":38.93,"Tatoeba (pam-eng)":10.73,"Tatoeba (pes-eng)":94.7,"Tatoeba (pms-eng)":64.57,"Tatoeba (pol-eng)":97.22,"Tatoeba (por-eng)":94.14,"Tatoeba (ron-eng)":96.92,"Tatoeba (rus-eng)":93.75,"Tatoeba 
(slk-eng)":96.5,"Tatoeba (slv-eng)":96.03,"Tatoeba (spa-eng)":98.4,"Tatoeba (sqi-eng)":96.76,"Tatoeba (srp-eng)":94.43,"Tatoeba (swe-eng)":95.63,"Tatoeba (swg-eng)":59.36,"Tatoeba (swh-eng)":84.5,"Tatoeba (tam-eng)":89.0,"Tatoeba (tat-eng)":85.92,"Tatoeba (tel-eng)":97.86,"Tatoeba (tgl-eng)":96.02,"Tatoeba (tha-eng)":96.14,"Tatoeba (tuk-eng)":75.27,"Tatoeba (tur-eng)":98.0,"Tatoeba (tzl-eng)":58.88,"Tatoeba (uig-eng)":92.4,"Tatoeba (ukr-eng)":93.97,"Tatoeba (urd-eng)":93.22,"Tatoeba (uzb-eng)":84.23,"Tatoeba (vie-eng)":97.2,"Tatoeba (war-eng)":60.29,"Tatoeba (wuu-eng)":90.18,"Tatoeba (xho-eng)":91.55,"Tatoeba (yid-eng)":88.79,"Tatoeba (yue-eng)":89.58,"Tatoeba (zsm-eng)":95.62} +{"Rank":2,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":67.42,"BUCC (de-en)":99.21,"BUCC (fr-en)":98.39,"BUCC (ru-en)":97.62,"BUCC (zh-en)":97.7,"Tatoeba (afr-eng)":92.59,"Tatoeba (amh-eng)":80.82,"Tatoeba (ang-eng)":25.22,"Tatoeba (ara-eng)":90.14,"Tatoeba (arq-eng)":26.63,"Tatoeba (arz-eng)":66.16,"Tatoeba (ast-eng)":76.35,"Tatoeba (awa-eng)":33.74,"Tatoeba (aze-eng)":82.41,"Tatoeba (bel-eng)":79.54,"Tatoeba (ben-eng)":89.43,"Tatoeba (ber-eng)":77.63,"Tatoeba (bos-eng)":95.86,"Tatoeba (bre-eng)":31.2,"Tatoeba (bul-eng)":93.57,"Tatoeba (cat-eng)":95.8,"Tatoeba (cbk-eng)":77.17,"Tatoeba (ceb-eng)":9.93,"Tatoeba (ces-eng)":95.52,"Tatoeba (cha-eng)":14.86,"Tatoeba (cmn-eng)":85.62,"Tatoeba (cor-eng)":4.45,"Tatoeba (csb-eng)":27.03,"Tatoeba (cym-eng)":5.85,"Tatoeba (dan-eng)":95.22,"Tatoeba (deu-eng)":99.07,"Tatoeba (dsb-eng)":42.34,"Tatoeba (dtp-eng)":7.39,"Tatoeba (ell-eng)":96.2,"Tatoeba (epo-eng)":96.61,"Tatoeba (est-eng)":96.43,"Tatoeba (eus-eng)":93.32,"Tatoeba (fao-eng)":57.04,"Tatoeba (fin-eng)":96.98,"Tatoeba (fra-eng)":94.28,"Tatoeba (fry-eng)":42.07,"Tatoeba (gla-eng)":1.52,"Tatoeba (gle-eng)":4.2,"Tatoeba (glg-eng)":96.14,"Tatoeba (gsw-eng)":27.52,"Tatoeba (heb-eng)":0.0,"Tatoeba (hin-eng)":95.32,"Tatoeba (hrv-eng)":96.72,"Tatoeba (hsb-eng)":45.75,"Tatoeba (hun-eng)":95.2,"Tatoeba (hye-eng)":88.72,"Tatoeba (ido-eng)":80.86,"Tatoeba (ile-eng)":87.88,"Tatoeba (ina-eng)":93.93,"Tatoeba (ind-eng)":92.98,"Tatoeba (isl-eng)":94.32,"Tatoeba (ita-eng)":94.32,"Tatoeba (jav-eng)":9.95,"Tatoeba (jpn-eng)":93.78,"Tatoeba (kab-eng)":65.88,"Tatoeba (kat-eng)":81.16,"Tatoeba (kaz-eng)":53.3,"Tatoeba (khm-eng)":74.19,"Tatoeba (kor-eng)":87.97,"Tatoeba (kur-eng)":19.09,"Tatoeba (kzj-eng)":4.46,"Tatoeba (lat-eng)":64.81,"Tatoeba (lfn-eng)":63.39,"Tatoeba (lit-eng)":96.2,"Tatoeba (lvs-eng)":95.33,"Tatoeba (mal-eng)":98.16,"Tatoeba (mar-eng)":92.93,"Tatoeba (max-eng)":36.96,"Tatoeba (mhr-eng)":6.86,"Tatoeba (mkd-eng)":93.63,"Tatoeba (mon-eng)":3.42,"Tatoeba (nds-eng)":77.13,"Tatoeba (nld-eng)":95.35,"Tatoeba (nno-eng)":72.75,"Tatoeba (nob-eng)":95.77,"Tatoeba (nov-eng)":60.02,"Tatoeba (oci-eng)":58.13,"Tatoeba (orv-eng)":23.24,"Tatoeba (pam-eng)":3.24,"Tatoeba (pes-eng)":93.13,"Tatoeba (pms-eng)":36.23,"Tatoeba (pol-eng)":97.32,"Tatoeba (por-eng)":94.54,"Tatoeba (ron-eng)":96.52,"Tatoeba (rus-eng)":92.58,"Tatoeba (slk-eng)":95.82,"Tatoeba (slv-eng)":95.4,"Tatoeba (spa-eng)":97.33,"Tatoeba (sqi-eng)":97.22,"Tatoeba (srp-eng)":93.64,"Tatoeba (swe-eng)":95.31,"Tatoeba (swg-eng)":33.1,"Tatoeba (swh-eng)":55.66,"Tatoeba (tam-eng)":87.32,"Tatoeba (tat-eng)":34.74,"Tatoeba (tel-eng)":96.72,"Tatoeba (tgl-eng)":63.19,"Tatoeba (tha-eng)":96.38,"Tatoeba (tuk-eng)":16.35,"Tatoeba (tur-eng)":98.03,"Tatoeba (tzl-eng)":36.56,"Tatoeba (uig-eng)":56.49,"Tatoeba (ukr-eng)":93.52,"Tatoeba 
(urd-eng)":84.23,"Tatoeba (uzb-eng)":23.2,"Tatoeba (vie-eng)":96.73,"Tatoeba (war-eng)":8.25,"Tatoeba (wuu-eng)":75.09,"Tatoeba (xho-eng)":4.68,"Tatoeba (yid-eng)":2.49,"Tatoeba (yue-eng)":87.75,"Tatoeba (zsm-eng)":95.41} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":63.38,"BUCC (de-en)":98.59,"BUCC (fr-en)":96.89,"BUCC (ru-en)":96.44,"BUCC (zh-en)":97.56,"Tatoeba (afr-eng)":72.96,"Tatoeba (amh-eng)":53.49,"Tatoeba (ang-eng)":16.72,"Tatoeba (ara-eng)":90.19,"Tatoeba (arq-eng)":19.84,"Tatoeba (arz-eng)":55.69,"Tatoeba (ast-eng)":70.08,"Tatoeba (awa-eng)":42.83,"Tatoeba (aze-eng)":76.36,"Tatoeba (bel-eng)":79.94,"Tatoeba (ben-eng)":64.9,"Tatoeba (ber-eng)":4.88,"Tatoeba (bos-eng)":94.02,"Tatoeba (bre-eng)":6.42,"Tatoeba (bul-eng)":93.52,"Tatoeba (cat-eng)":96.05,"Tatoeba (cbk-eng)":58.68,"Tatoeba (ceb-eng)":7.39,"Tatoeba (ces-eng)":95.73,"Tatoeba (cha-eng)":12.59,"Tatoeba (cmn-eng)":95.83,"Tatoeba (cor-eng)":3.53,"Tatoeba (csb-eng)":23.73,"Tatoeba (cym-eng)":22.31,"Tatoeba (dan-eng)":96.17,"Tatoeba (deu-eng)":97.73,"Tatoeba (dsb-eng)":36.85,"Tatoeba (dtp-eng)":5.03,"Tatoeba (ell-eng)":94.93,"Tatoeba (epo-eng)":55.12,"Tatoeba (est-eng)":98.4,"Tatoeba (eus-eng)":31.33,"Tatoeba (fao-eng)":38.24,"Tatoeba (fin-eng)":95.92,"Tatoeba (fra-eng)":93.12,"Tatoeba (fry-eng)":43.54,"Tatoeba (gla-eng)":4.72,"Tatoeba (gle-eng)":16.85,"Tatoeba (glg-eng)":95.32,"Tatoeba (gsw-eng)":25.12,"Tatoeba (heb-eng)":88.26,"Tatoeba (hin-eng)":97.75,"Tatoeba (hrv-eng)":97.0,"Tatoeba (hsb-eng)":44.32,"Tatoeba (hun-eng)":94.18,"Tatoeba (hye-eng)":94.38,"Tatoeba (ido-eng)":43.91,"Tatoeba (ile-eng)":60.36,"Tatoeba (ina-eng)":84.32,"Tatoeba (ind-eng)":93.5,"Tatoeba (isl-eng)":59.25,"Tatoeba (ita-eng)":93.76,"Tatoeba (jav-eng)":23.39,"Tatoeba (jpn-eng)":92.51,"Tatoeba (kab-eng)":1.41,"Tatoeba (kat-eng)":95.46,"Tatoeba (kaz-eng)":61.49,"Tatoeba (khm-eng)":58.8,"Tatoeba (kor-eng)":93.07,"Tatoeba (kur-eng)":61.44,"Tatoeba (kzj-eng)":5.88,"Tatoeba (lat-eng)":24.25,"Tatoeba (lfn-eng)":49.56,"Tatoeba (lit-eng)":95.37,"Tatoeba (lvs-eng)":97.53,"Tatoeba (mal-eng)":88.46,"Tatoeba (mar-eng)":93.83,"Tatoeba (max-eng)":48.77,"Tatoeba (mhr-eng)":7.57,"Tatoeba (mkd-eng)":93.02,"Tatoeba (mon-eng)":96.14,"Tatoeba (nds-eng)":38.88,"Tatoeba (nld-eng)":95.5,"Tatoeba (nno-eng)":81.41,"Tatoeba (nob-eng)":98.53,"Tatoeba (nov-eng)":50.23,"Tatoeba (oci-eng)":43.49,"Tatoeba (orv-eng)":23.77,"Tatoeba (pam-eng)":5.39,"Tatoeba (pes-eng)":93.47,"Tatoeba (pms-eng)":34.19,"Tatoeba (pol-eng)":96.95,"Tatoeba (por-eng)":93.02,"Tatoeba (ron-eng)":96.43,"Tatoeba (rus-eng)":92.92,"Tatoeba (slk-eng)":96.62,"Tatoeba (slv-eng)":97.08,"Tatoeba (spa-eng)":97.0,"Tatoeba (sqi-eng)":98.57,"Tatoeba (srp-eng)":94.12,"Tatoeba (swe-eng)":95.45,"Tatoeba (swg-eng)":22.8,"Tatoeba (swh-eng)":16.02,"Tatoeba (tam-eng)":73.6,"Tatoeba (tat-eng)":10.89,"Tatoeba (tel-eng)":79.73,"Tatoeba (tgl-eng)":17.67,"Tatoeba (tha-eng)":95.99,"Tatoeba (tuk-eng)":14.91,"Tatoeba (tur-eng)":96.17,"Tatoeba (tzl-eng)":34.21,"Tatoeba (uig-eng)":48.35,"Tatoeba (ukr-eng)":92.67,"Tatoeba (urd-eng)":95.12,"Tatoeba (uzb-eng)":23.19,"Tatoeba (vie-eng)":97.23,"Tatoeba (war-eng)":7.42,"Tatoeba (wuu-eng)":78.25,"Tatoeba (xho-eng)":6.53,"Tatoeba (yid-eng)":30.73,"Tatoeba (yue-eng)":77.58,"Tatoeba (zsm-eng)":95.8} +{"Rank":4,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":57.98,"BUCC (de-en)":97.11,"BUCC (fr-en)":94.99,"BUCC 
(ru-en)":95.06,"BUCC (zh-en)":95.63,"Tatoeba (afr-eng)":58.22,"Tatoeba (amh-eng)":36.21,"Tatoeba (ang-eng)":10.24,"Tatoeba (ara-eng)":87.93,"Tatoeba (arq-eng)":18.6,"Tatoeba (arz-eng)":51.26,"Tatoeba (ast-eng)":62.17,"Tatoeba (awa-eng)":33.43,"Tatoeba (aze-eng)":62.1,"Tatoeba (bel-eng)":67.73,"Tatoeba (ben-eng)":36.48,"Tatoeba (ber-eng)":4.43,"Tatoeba (bos-eng)":93.27,"Tatoeba (bre-eng)":5.56,"Tatoeba (bul-eng)":92.65,"Tatoeba (cat-eng)":94.42,"Tatoeba (cbk-eng)":55.37,"Tatoeba (ceb-eng)":8.05,"Tatoeba (ces-eng)":95.12,"Tatoeba (cha-eng)":15.98,"Tatoeba (cmn-eng)":94.93,"Tatoeba (cor-eng)":3.42,"Tatoeba (csb-eng)":21.56,"Tatoeba (cym-eng)":13.25,"Tatoeba (dan-eng)":94.8,"Tatoeba (deu-eng)":97.02,"Tatoeba (dsb-eng)":33.43,"Tatoeba (dtp-eng)":5.69,"Tatoeba (ell-eng)":95.43,"Tatoeba (epo-eng)":41.73,"Tatoeba (est-eng)":97.33,"Tatoeba (eus-eng)":23.18,"Tatoeba (fao-eng)":27.51,"Tatoeba (fin-eng)":93.1,"Tatoeba (fra-eng)":91.72,"Tatoeba (fry-eng)":31.13,"Tatoeba (gla-eng)":3.61,"Tatoeba (gle-eng)":11.62,"Tatoeba (glg-eng)":94.0,"Tatoeba (gsw-eng)":25.74,"Tatoeba (heb-eng)":86.88,"Tatoeba (hin-eng)":97.62,"Tatoeba (hrv-eng)":95.98,"Tatoeba (hsb-eng)":36.1,"Tatoeba (hun-eng)":91.58,"Tatoeba (hye-eng)":93.28,"Tatoeba (ido-eng)":40.25,"Tatoeba (ile-eng)":57.71,"Tatoeba (ina-eng)":79.13,"Tatoeba (ind-eng)":92.74,"Tatoeba (isl-eng)":24.07,"Tatoeba (ita-eng)":93.05,"Tatoeba (jav-eng)":17.04,"Tatoeba (jpn-eng)":90.41,"Tatoeba (kab-eng)":1.16,"Tatoeba (kat-eng)":95.44,"Tatoeba (kaz-eng)":34.89,"Tatoeba (khm-eng)":32.11,"Tatoeba (kor-eng)":92.52,"Tatoeba (kur-eng)":46.94,"Tatoeba (kzj-eng)":6.24,"Tatoeba (lat-eng)":19.47,"Tatoeba (lfn-eng)":47.02,"Tatoeba (lit-eng)":93.16,"Tatoeba (lvs-eng)":97.87,"Tatoeba (mal-eng)":32.2,"Tatoeba (mar-eng)":92.38,"Tatoeba (max-eng)":45.25,"Tatoeba (mhr-eng)":6.89,"Tatoeba (mkd-eng)":91.0,"Tatoeba (mon-eng)":95.04,"Tatoeba (nds-eng)":32.16,"Tatoeba (nld-eng)":94.58,"Tatoeba (nno-eng)":76.34,"Tatoeba (nob-eng)":97.73,"Tatoeba (nov-eng)":47.99,"Tatoeba (oci-eng)":38.57,"Tatoeba (orv-eng)":15.1,"Tatoeba (pam-eng)":5.41,"Tatoeba (pes-eng)":92.59,"Tatoeba (pms-eng)":30.7,"Tatoeba (pol-eng)":94.28,"Tatoeba (por-eng)":92.13,"Tatoeba (ron-eng)":95.3,"Tatoeba (rus-eng)":91.87,"Tatoeba (slk-eng)":95.15,"Tatoeba (slv-eng)":96.92,"Tatoeba (spa-eng)":95.42,"Tatoeba (sqi-eng)":98.17,"Tatoeba (srp-eng)":92.24,"Tatoeba (swe-eng)":94.42,"Tatoeba (swg-eng)":26.31,"Tatoeba (swh-eng)":14.48,"Tatoeba (tam-eng)":24.64,"Tatoeba (tat-eng)":10.25,"Tatoeba (tel-eng)":36.4,"Tatoeba (tgl-eng)":13.09,"Tatoeba (tha-eng)":96.72,"Tatoeba (tuk-eng)":15.16,"Tatoeba (tur-eng)":95.08,"Tatoeba (tzl-eng)":25.46,"Tatoeba (uig-eng)":24.39,"Tatoeba (ukr-eng)":92.82,"Tatoeba (urd-eng)":94.57,"Tatoeba (uzb-eng)":17.14,"Tatoeba (vie-eng)":95.12,"Tatoeba (war-eng)":7.25,"Tatoeba (wuu-eng)":76.0,"Tatoeba (xho-eng)":4.52,"Tatoeba (yid-eng)":14.38,"Tatoeba (yue-eng)":71.45,"Tatoeba (zsm-eng)":95.31} +{"Rank":5,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":21.98,"BUCC (de-en)":95.04,"BUCC (fr-en)":94.96,"BUCC (ru-en)":8.33,"BUCC (zh-en)":1.3,"Tatoeba (afr-eng)":41.84,"Tatoeba (amh-eng)":0.03,"Tatoeba (ang-eng)":37.87,"Tatoeba (ara-eng)":0.61,"Tatoeba (arq-eng)":0.74,"Tatoeba (arz-eng)":0.42,"Tatoeba (ast-eng)":65.41,"Tatoeba (awa-eng)":1.46,"Tatoeba (aze-eng)":8.79,"Tatoeba (bel-eng)":5.76,"Tatoeba (ben-eng)":0.01,"Tatoeba (ber-eng)":5.92,"Tatoeba (bos-eng)":16.12,"Tatoeba (bre-eng)":6.12,"Tatoeba (bul-eng)":9.06,"Tatoeba (cat-eng)":57.4,"Tatoeba 
(cbk-eng)":57.68,"Tatoeba (ceb-eng)":12.56,"Tatoeba (ces-eng)":9.47,"Tatoeba (cha-eng)":27.13,"Tatoeba (cmn-eng)":1.82,"Tatoeba (cor-eng)":3.87,"Tatoeba (csb-eng)":14.41,"Tatoeba (cym-eng)":6.69,"Tatoeba (dan-eng)":54.87,"Tatoeba (deu-eng)":93.72,"Tatoeba (dsb-eng)":14.74,"Tatoeba (dtp-eng)":5.84,"Tatoeba (ell-eng)":0.6,"Tatoeba (epo-eng)":30.8,"Tatoeba (est-eng)":5.39,"Tatoeba (eus-eng)":11.9,"Tatoeba (fao-eng)":28.08,"Tatoeba (fin-eng)":6.81,"Tatoeba (fra-eng)":85.29,"Tatoeba (fry-eng)":38.68,"Tatoeba (gla-eng)":2.96,"Tatoeba (gle-eng)":3.74,"Tatoeba (glg-eng)":70.0,"Tatoeba (gsw-eng)":30.49,"Tatoeba (heb-eng)":0.87,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":17.43,"Tatoeba (hsb-eng)":14.69,"Tatoeba (hun-eng)":7.28,"Tatoeba (hye-eng)":0.77,"Tatoeba (ido-eng)":46.65,"Tatoeba (ile-eng)":59.43,"Tatoeba (ina-eng)":82.71,"Tatoeba (ind-eng)":37.26,"Tatoeba (isl-eng)":11.21,"Tatoeba (ita-eng)":79.77,"Tatoeba (jav-eng)":7.81,"Tatoeba (jpn-eng)":0.91,"Tatoeba (kab-eng)":2.23,"Tatoeba (kat-eng)":1.48,"Tatoeba (kaz-eng)":1.77,"Tatoeba (khm-eng)":0.38,"Tatoeba (kor-eng)":1.96,"Tatoeba (kur-eng)":12.11,"Tatoeba (kzj-eng)":6.13,"Tatoeba (lat-eng)":27.84,"Tatoeba (lfn-eng)":45.89,"Tatoeba (lit-eng)":5.94,"Tatoeba (lvs-eng)":8.11,"Tatoeba (mal-eng)":0.59,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":21.7,"Tatoeba (mhr-eng)":0.68,"Tatoeba (mkd-eng)":5.92,"Tatoeba (mon-eng)":2.39,"Tatoeba (nds-eng)":45.04,"Tatoeba (nld-eng)":64.75,"Tatoeba (nno-eng)":36.74,"Tatoeba (nob-eng)":54.77,"Tatoeba (nov-eng)":57.12,"Tatoeba (oci-eng)":34.39,"Tatoeba (orv-eng)":2.04,"Tatoeba (pam-eng)":8.34,"Tatoeba (pes-eng)":0.87,"Tatoeba (pms-eng)":38.06,"Tatoeba (pol-eng)":28.35,"Tatoeba (por-eng)":83.61,"Tatoeba (ron-eng)":65.27,"Tatoeba (rus-eng)":30.42,"Tatoeba (slk-eng)":13.19,"Tatoeba (slv-eng)":13.49,"Tatoeba (spa-eng)":89.18,"Tatoeba (sqi-eng)":14.66,"Tatoeba (srp-eng)":13.24,"Tatoeba (swe-eng)":60.67,"Tatoeba (swg-eng)":34.76,"Tatoeba (swh-eng)":8.07,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.46,"Tatoeba (tel-eng)":0.67,"Tatoeba (tgl-eng)":25.22,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.99,"Tatoeba (tur-eng)":7.72,"Tatoeba (tzl-eng)":38.49,"Tatoeba (uig-eng)":0.87,"Tatoeba (ukr-eng)":9.12,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":5.48,"Tatoeba (vie-eng)":8.45,"Tatoeba (war-eng)":13.75,"Tatoeba (wuu-eng)":1.44,"Tatoeba (xho-eng)":9.15,"Tatoeba (yid-eng)":0.28,"Tatoeba (yue-eng)":0.98,"Tatoeba (zsm-eng)":35.71} +{"Rank":6,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":19.48,"BUCC (de-en)":90.99,"BUCC (fr-en)":88.55,"BUCC (ru-en)":2.07,"BUCC (zh-en)":1.49,"Tatoeba (afr-eng)":33.47,"Tatoeba (amh-eng)":0.01,"Tatoeba (ang-eng)":30.74,"Tatoeba (ara-eng)":0.47,"Tatoeba (arq-eng)":0.34,"Tatoeba (arz-eng)":0.14,"Tatoeba (ast-eng)":51.74,"Tatoeba (awa-eng)":0.49,"Tatoeba (aze-eng)":7.43,"Tatoeba (bel-eng)":3.45,"Tatoeba (ben-eng)":0.06,"Tatoeba (ber-eng)":5.79,"Tatoeba (bos-eng)":17.43,"Tatoeba (bre-eng)":5.69,"Tatoeba (bul-eng)":7.55,"Tatoeba (cat-eng)":48.06,"Tatoeba (cbk-eng)":54.56,"Tatoeba (ceb-eng)":8.72,"Tatoeba (ces-eng)":8.76,"Tatoeba (cha-eng)":27.56,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":3.69,"Tatoeba (csb-eng)":13.18,"Tatoeba (cym-eng)":6.97,"Tatoeba (dan-eng)":47.36,"Tatoeba (deu-eng)":91.54,"Tatoeba (dsb-eng)":13.2,"Tatoeba (dtp-eng)":4.54,"Tatoeba (ell-eng)":0.55,"Tatoeba (epo-eng)":27.86,"Tatoeba (est-eng)":5.13,"Tatoeba (eus-eng)":10.23,"Tatoeba (fao-eng)":21.44,"Tatoeba (fin-eng)":6.62,"Tatoeba (fra-eng)":79.66,"Tatoeba 
(fry-eng)":32.92,"Tatoeba (gla-eng)":2.87,"Tatoeba (gle-eng)":3.26,"Tatoeba (glg-eng)":63.81,"Tatoeba (gsw-eng)":29.71,"Tatoeba (heb-eng)":0.33,"Tatoeba (hin-eng)":0.25,"Tatoeba (hrv-eng)":17.16,"Tatoeba (hsb-eng)":12.02,"Tatoeba (hun-eng)":7.21,"Tatoeba (hye-eng)":0.78,"Tatoeba (ido-eng)":40.83,"Tatoeba (ile-eng)":54.95,"Tatoeba (ina-eng)":72.28,"Tatoeba (ind-eng)":30.95,"Tatoeba (isl-eng)":11.29,"Tatoeba (ita-eng)":73.83,"Tatoeba (jav-eng)":8.66,"Tatoeba (jpn-eng)":0.61,"Tatoeba (kab-eng)":1.78,"Tatoeba (kat-eng)":0.79,"Tatoeba (kaz-eng)":0.95,"Tatoeba (khm-eng)":0.49,"Tatoeba (kor-eng)":1.87,"Tatoeba (kur-eng)":10.91,"Tatoeba (kzj-eng)":5.72,"Tatoeba (lat-eng)":18.24,"Tatoeba (lfn-eng)":43.49,"Tatoeba (lit-eng)":7.13,"Tatoeba (lvs-eng)":7.04,"Tatoeba (mal-eng)":0.44,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":18.99,"Tatoeba (mhr-eng)":1.11,"Tatoeba (mkd-eng)":2.49,"Tatoeba (mon-eng)":2.01,"Tatoeba (nds-eng)":39.96,"Tatoeba (nld-eng)":58.86,"Tatoeba (nno-eng)":29.07,"Tatoeba (nob-eng)":40.25,"Tatoeba (nov-eng)":50.19,"Tatoeba (oci-eng)":30.72,"Tatoeba (orv-eng)":0.85,"Tatoeba (pam-eng)":7.21,"Tatoeba (pes-eng)":0.53,"Tatoeba (pms-eng)":31.07,"Tatoeba (pol-eng)":18.06,"Tatoeba (por-eng)":81.92,"Tatoeba (ron-eng)":62.6,"Tatoeba (rus-eng)":22.24,"Tatoeba (slk-eng)":10.59,"Tatoeba (slv-eng)":11.4,"Tatoeba (spa-eng)":85.78,"Tatoeba (sqi-eng)":14.92,"Tatoeba (srp-eng)":9.87,"Tatoeba (swe-eng)":55.08,"Tatoeba (swg-eng)":32.66,"Tatoeba (swh-eng)":7.64,"Tatoeba (tam-eng)":0.49,"Tatoeba (tat-eng)":1.28,"Tatoeba (tel-eng)":0.45,"Tatoeba (tgl-eng)":23.63,"Tatoeba (tha-eng)":0.61,"Tatoeba (tuk-eng)":5.71,"Tatoeba (tur-eng)":8.25,"Tatoeba (tzl-eng)":28.4,"Tatoeba (uig-eng)":0.57,"Tatoeba (ukr-eng)":5.69,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.19,"Tatoeba (vie-eng)":9.07,"Tatoeba (war-eng)":12.31,"Tatoeba (wuu-eng)":1.38,"Tatoeba (xho-eng)":7.6,"Tatoeba (yid-eng)":0.41,"Tatoeba (yue-eng)":1.31,"Tatoeba (zsm-eng)":29.74} +{"Rank":7,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":17.26,"BUCC (de-en)":87.0,"BUCC (fr-en)":88.91,"BUCC (ru-en)":0.44,"BUCC (zh-en)":0.95,"Tatoeba (afr-eng)":23.7,"Tatoeba (amh-eng)":0.65,"Tatoeba (ang-eng)":30.98,"Tatoeba (ara-eng)":0.48,"Tatoeba (arq-eng)":0.68,"Tatoeba (arz-eng)":0.22,"Tatoeba (ast-eng)":55.3,"Tatoeba (awa-eng)":1.03,"Tatoeba (aze-eng)":5.83,"Tatoeba (bel-eng)":1.66,"Tatoeba (ben-eng)":0.0,"Tatoeba (ber-eng)":5.62,"Tatoeba (bos-eng)":12.23,"Tatoeba (bre-eng)":5.84,"Tatoeba (bul-eng)":1.35,"Tatoeba (cat-eng)":48.56,"Tatoeba (cbk-eng)":46.97,"Tatoeba (ceb-eng)":9.79,"Tatoeba (ces-eng)":6.0,"Tatoeba (cha-eng)":24.21,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":4.03,"Tatoeba (csb-eng)":9.53,"Tatoeba (cym-eng)":9.17,"Tatoeba (dan-eng)":34.63,"Tatoeba (deu-eng)":89.31,"Tatoeba (dsb-eng)":9.68,"Tatoeba (dtp-eng)":4.66,"Tatoeba (ell-eng)":0.77,"Tatoeba (epo-eng)":26.88,"Tatoeba (est-eng)":5.19,"Tatoeba (eus-eng)":9.46,"Tatoeba (fao-eng)":21.59,"Tatoeba (fin-eng)":5.66,"Tatoeba (fra-eng)":79.71,"Tatoeba (fry-eng)":28.29,"Tatoeba (gla-eng)":2.34,"Tatoeba (gle-eng)":3.55,"Tatoeba (glg-eng)":56.25,"Tatoeba (gsw-eng)":24.25,"Tatoeba (heb-eng)":0.57,"Tatoeba (hin-eng)":0.12,"Tatoeba (hrv-eng)":10.29,"Tatoeba (hsb-eng)":9.52,"Tatoeba (hun-eng)":6.22,"Tatoeba (hye-eng)":0.81,"Tatoeba (ido-eng)":41.11,"Tatoeba (ile-eng)":54.0,"Tatoeba (ina-eng)":75.47,"Tatoeba (ind-eng)":13.02,"Tatoeba (isl-eng)":8.98,"Tatoeba (ita-eng)":67.23,"Tatoeba (jav-eng)":8.54,"Tatoeba (jpn-eng)":0.99,"Tatoeba 
(kab-eng)":1.85,"Tatoeba (kat-eng)":1.37,"Tatoeba (kaz-eng)":0.67,"Tatoeba (khm-eng)":0.56,"Tatoeba (kor-eng)":1.73,"Tatoeba (kur-eng)":9.23,"Tatoeba (kzj-eng)":5.38,"Tatoeba (lat-eng)":21.3,"Tatoeba (lfn-eng)":40.48,"Tatoeba (lit-eng)":5.38,"Tatoeba (lvs-eng)":6.83,"Tatoeba (mal-eng)":0.45,"Tatoeba (mar-eng)":0.01,"Tatoeba (max-eng)":16.44,"Tatoeba (mhr-eng)":0.33,"Tatoeba (mkd-eng)":0.4,"Tatoeba (mon-eng)":2.48,"Tatoeba (nds-eng)":34.66,"Tatoeba (nld-eng)":42.72,"Tatoeba (nno-eng)":24.08,"Tatoeba (nob-eng)":34.17,"Tatoeba (nov-eng)":55.01,"Tatoeba (oci-eng)":29.15,"Tatoeba (orv-eng)":0.2,"Tatoeba (pam-eng)":6.99,"Tatoeba (pes-eng)":0.9,"Tatoeba (pms-eng)":30.8,"Tatoeba (pol-eng)":12.81,"Tatoeba (por-eng)":73.45,"Tatoeba (ron-eng)":54.86,"Tatoeba (rus-eng)":2.43,"Tatoeba (slk-eng)":8.35,"Tatoeba (slv-eng)":9.3,"Tatoeba (spa-eng)":78.87,"Tatoeba (sqi-eng)":11.74,"Tatoeba (srp-eng)":5.83,"Tatoeba (swe-eng)":35.41,"Tatoeba (swg-eng)":28.18,"Tatoeba (swh-eng)":7.53,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.01,"Tatoeba (tel-eng)":1.1,"Tatoeba (tgl-eng)":12.4,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.95,"Tatoeba (tur-eng)":6.45,"Tatoeba (tzl-eng)":37.82,"Tatoeba (uig-eng)":0.67,"Tatoeba (ukr-eng)":1.88,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.79,"Tatoeba (vie-eng)":7.03,"Tatoeba (war-eng)":9.68,"Tatoeba (wuu-eng)":1.28,"Tatoeba (xho-eng)":10.64,"Tatoeba (yid-eng)":0.57,"Tatoeba (yue-eng)":0.88,"Tatoeba (zsm-eng)":14.67} +{"Rank":8,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":4.54,"BUCC (de-en)":0.18,"BUCC (fr-en)":0.08,"BUCC (ru-en)":0.15,"BUCC (zh-en)":0.05,"Tatoeba (afr-eng)":4.82,"Tatoeba (amh-eng)":1.18,"Tatoeba (ang-eng)":8.54,"Tatoeba (ara-eng)":0.63,"Tatoeba (arq-eng)":0.4,"Tatoeba (arz-eng)":0.63,"Tatoeba (ast-eng)":11.69,"Tatoeba (awa-eng)":0.0,"Tatoeba (aze-eng)":3.22,"Tatoeba (bel-eng)":1.75,"Tatoeba (ben-eng)":0.2,"Tatoeba (ber-eng)":7.0,"Tatoeba (bos-eng)":9.31,"Tatoeba (bre-eng)":4.17,"Tatoeba (bul-eng)":1.29,"Tatoeba (cat-eng)":7.73,"Tatoeba (cbk-eng)":5.61,"Tatoeba (ceb-eng)":4.88,"Tatoeba (ces-eng)":3.55,"Tatoeba (cha-eng)":19.29,"Tatoeba (cmn-eng)":0.5,"Tatoeba (cor-eng)":4.15,"Tatoeba (csb-eng)":5.69,"Tatoeba (cym-eng)":8.4,"Tatoeba (dan-eng)":6.99,"Tatoeba (deu-eng)":3.67,"Tatoeba (dsb-eng)":5.33,"Tatoeba (dtp-eng)":4.25,"Tatoeba (ell-eng)":0.63,"Tatoeba (epo-eng)":2.45,"Tatoeba (est-eng)":2.69,"Tatoeba (eus-eng)":4.69,"Tatoeba (fao-eng)":7.61,"Tatoeba (fin-eng)":3.36,"Tatoeba (fra-eng)":7.0,"Tatoeba (fry-eng)":12.36,"Tatoeba (gla-eng)":3.07,"Tatoeba (gle-eng)":4.81,"Tatoeba (glg-eng)":8.12,"Tatoeba (gsw-eng)":18.87,"Tatoeba (heb-eng)":0.68,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":5.41,"Tatoeba (hsb-eng)":6.32,"Tatoeba (hun-eng)":3.42,"Tatoeba (hye-eng)":0.97,"Tatoeba (ido-eng)":7.1,"Tatoeba (ile-eng)":13.61,"Tatoeba (ina-eng)":8.57,"Tatoeba (ind-eng)":7.26,"Tatoeba (isl-eng)":4.09,"Tatoeba (ita-eng)":5.54,"Tatoeba (jav-eng)":11.43,"Tatoeba (jpn-eng)":0.2,"Tatoeba (kab-eng)":2.71,"Tatoeba (kat-eng)":1.11,"Tatoeba (kaz-eng)":1.17,"Tatoeba (khm-eng)":0.55,"Tatoeba (kor-eng)":0.5,"Tatoeba (kur-eng)":8.55,"Tatoeba (kzj-eng)":4.61,"Tatoeba (lat-eng)":4.07,"Tatoeba (lfn-eng)":2.83,"Tatoeba (lit-eng)":0.95,"Tatoeba (lvs-eng)":3.25,"Tatoeba (mal-eng)":0.29,"Tatoeba (mar-eng)":0.2,"Tatoeba (max-eng)":14.53,"Tatoeba (mhr-eng)":0.2,"Tatoeba (mkd-eng)":0.2,"Tatoeba (mon-eng)":1.1,"Tatoeba (nds-eng)":10.37,"Tatoeba (nld-eng)":9.5,"Tatoeba (nno-eng)":4.49,"Tatoeba (nob-eng)":4.95,"Tatoeba (nov-eng)":14.53,"Tatoeba 
(oci-eng)":5.8,"Tatoeba (orv-eng)":0.24,"Tatoeba (pam-eng)":6.65,"Tatoeba (pes-eng)":0.5,"Tatoeba (pms-eng)":8.05,"Tatoeba (pol-eng)":5.13,"Tatoeba (por-eng)":5.87,"Tatoeba (ron-eng)":6.76,"Tatoeba (rus-eng)":0.2,"Tatoeba (slk-eng)":4.23,"Tatoeba (slv-eng)":6.05,"Tatoeba (spa-eng)":5.03,"Tatoeba (sqi-eng)":4.36,"Tatoeba (srp-eng)":1.77,"Tatoeba (swe-eng)":6.72,"Tatoeba (swg-eng)":8.54,"Tatoeba (swh-eng)":11.49,"Tatoeba (tam-eng)":1.3,"Tatoeba (tat-eng)":0.77,"Tatoeba (tel-eng)":0.85,"Tatoeba (tgl-eng)":2.61,"Tatoeba (tha-eng)":0.69,"Tatoeba (tuk-eng)":5.76,"Tatoeba (tur-eng)":5.24,"Tatoeba (tzl-eng)":15.51,"Tatoeba (uig-eng)":0.6,"Tatoeba (ukr-eng)":1.23,"Tatoeba (urd-eng)":0.4,"Tatoeba (uzb-eng)":4.73,"Tatoeba (vie-eng)":6.55,"Tatoeba (war-eng)":4.12,"Tatoeba (wuu-eng)":0.2,"Tatoeba (xho-eng)":4.33,"Tatoeba (yid-eng)":0.59,"Tatoeba (yue-eng)":0.5,"Tatoeba (zsm-eng)":7.27} +{"Rank":9,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","BUCC (de-en)":0.18,"BUCC (fr-en)":0.19,"BUCC (ru-en)":0.1,"BUCC (zh-en)":0.0,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba 
(war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} diff --git a/all_data_tasks/8/default.jsonl b/all_data_tasks/8/default.jsonl index ac03a90aac5232ef609c766d254351531a9d9b0a..0c47d2212c9f31ddfbd2d9f9cc0525de5196f5d7 100644 --- a/all_data_tasks/8/default.jsonl +++ b/all_data_tasks/8/default.jsonl @@ -1,88 +1,112 @@ -{"index":17,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":75.09,"AmazonReviewsClassification (zh)":53.98,"IFlyTek":54.52,"JDReview":86.51,"MassiveIntentClassification (zh-CN)":81.09,"MassiveScenarioClassification (zh-CN)":86.06,"MultilingualSentiment":76.88,"OnlineShopping":94.3,"TNews":52.97,"Waimai":89.47} -{"index":234,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.09,"AmazonReviewsClassification (zh)":53.98,"IFlyTek":54.52,"JDReview":86.51,"MassiveIntentClassification (zh-CN)":81.09,"MassiveScenarioClassification (zh-CN)":86.06,"MultilingualSentiment":76.88,"OnlineShopping":94.3,"TNews":52.97,"Waimai":89.47} -{"index":142,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.09,"AmazonReviewsClassification (zh)":53.98,"IFlyTek":54.52,"JDReview":86.51,"MassiveIntentClassification (zh-CN)":81.09,"MassiveScenarioClassification (zh-CN)":86.06,"MultilingualSentiment":76.88,"OnlineShopping":94.3,"TNews":52.97,"Waimai":89.47} -{"index":207,"Rank":4,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.67,"AmazonReviewsClassification (zh)":50.07,"IFlyTek":51.76,"JDReview":89.08,"MassiveIntentClassification (zh-CN)":77.45,"MassiveScenarioClassification (zh-CN)":85.3,"MultilingualSentiment":79.45,"OnlineShopping":94.9,"TNews":54.64,"Waimai":89.34} -{"index":253,"Rank":5,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.59,"AmazonReviewsClassification (zh)":49.44,"IFlyTek":52.1,"JDReview":88.57,"MassiveIntentClassification (zh-CN)":77.71,"MassiveScenarioClassification (zh-CN)":85.63,"MultilingualSentiment":79.09,"OnlineShopping":94.62,"TNews":54.52,"Waimai":89.59} -{"index":169,"Rank":6,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.43,"AmazonReviewsClassification (zh)":49.72,"IFlyTek":51.7,"JDReview":88.87,"MassiveIntentClassification (zh-CN)":76.79,"MassiveScenarioClassification (zh-CN)":84.96,"MultilingualSentiment":79.22,"OnlineShopping":94.88,"TNews":54.35,"Waimai":89.36} -{"index":38,"Rank":7,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.3,"AmazonReviewsClassification (zh)":49.5,"IFlyTek":51.77,"JDReview":88.48,"MassiveIntentClassification (zh-CN)":76.96,"MassiveScenarioClassification (zh-CN)":84.64,"MultilingualSentiment":78.97,"OnlineShopping":94.56,"TNews":54.39,"Waimai":89.42} -{"index":50,"Rank":8,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.18,"AmazonReviewsClassification (zh)":49.67,"IFlyTek":52.64,"JDReview":87.6,"MassiveIntentClassification (zh-CN)":77.43,"MassiveScenarioClassification (zh-CN)":83.87,"MultilingualSentiment":78.48,"OnlineShopping":94.34,"TNews":54.37,"Waimai":89.23} 
-{"index":276,"Rank":9,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.14,"AmazonReviewsClassification (zh)":49.68,"IFlyTek":51.77,"JDReview":86.94,"MassiveIntentClassification (zh-CN)":80.6,"MassiveScenarioClassification (zh-CN)":87.42,"MultilingualSentiment":75.92,"OnlineShopping":94.03,"TNews":52.69,"Waimai":88.23} -{"index":29,"Rank":10,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.93,"AmazonReviewsClassification (zh)":53.0,"IFlyTek":49.94,"JDReview":88.91,"MassiveIntentClassification (zh-CN)":78.96,"MassiveScenarioClassification (zh-CN)":81.54,"MultilingualSentiment":78.91,"OnlineShopping":94.59,"TNews":50.26,"Waimai":89.26} -{"index":15,"Rank":11,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":73.36,"AmazonReviewsClassification (zh)":52.95,"IFlyTek":53.77,"JDReview":88.2,"MassiveIntentClassification (zh-CN)":76.25,"MassiveScenarioClassification (zh-CN)":77.26,"MultilingualSentiment":77.42,"OnlineShopping":94.48,"TNews":51.24,"Waimai":88.63} -{"index":315,"Rank":12,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.84,"AmazonReviewsClassification (zh)":48.3,"IFlyTek":50.75,"JDReview":87.69,"MassiveIntentClassification (zh-CN)":74.91,"MassiveScenarioClassification (zh-CN)":81.28,"MultilingualSentiment":76.83,"OnlineShopping":94.42,"TNews":52.62,"Waimai":88.77} -{"index":129,"Rank":13,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.75,"AmazonReviewsClassification (zh)":48.54,"IFlyTek":51.77,"JDReview":86.7,"MassiveIntentClassification (zh-CN)":75.87,"MassiveScenarioClassification (zh-CN)":78.44,"MultilingualSentiment":77.53,"OnlineShopping":93.71,"TNews":53.69,"Waimai":88.53} -{"index":12,"Rank":14,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.74,"AmazonReviewsClassification (zh)":46.18,"IFlyTek":51.8,"JDReview":86.02,"MassiveIntentClassification (zh-CN)":73.85,"MassiveScenarioClassification (zh-CN)":77.13,"MultilingualSentiment":76.35,"OnlineShopping":93.2,"TNews":53.06,"Waimai":88.1} -{"index":116,"Rank":15,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.56,"AmazonReviewsClassification (zh)":46.59,"IFlyTek":50.74,"JDReview":86.1,"MassiveIntentClassification (zh-CN)":73.28,"MassiveScenarioClassification (zh-CN)":76.2,"MultilingualSentiment":76.64,"OnlineShopping":93.39,"TNews":53.1,"Waimai":88.01} -{"index":155,"Rank":16,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.56,"AmazonReviewsClassification (zh)":46.59,"IFlyTek":50.74,"JDReview":86.1,"MassiveIntentClassification (zh-CN)":73.28,"MassiveScenarioClassification (zh-CN)":76.2,"MultilingualSentiment":76.64,"OnlineShopping":93.39,"TNews":53.1,"Waimai":88.01} -{"index":154,"Rank":17,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.5,"AmazonReviewsClassification (zh)":46.32,"IFlyTek":50.63,"JDReview":86.12,"MassiveIntentClassification (zh-CN)":73.28,"MassiveScenarioClassification (zh-CN)":76.3,"MultilingualSentiment":76.45,"OnlineShopping":93.24,"TNews":53.11,"Waimai":88.08} 
-{"index":105,"Rank":18,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.35,"AmazonReviewsClassification (zh)":46.95,"IFlyTek":49.67,"JDReview":86.53,"MassiveIntentClassification (zh-CN)":73.39,"MassiveScenarioClassification (zh-CN)":76.04,"MultilingualSentiment":76.4,"OnlineShopping":92.6,"TNews":52.04,"Waimai":88.49} -{"index":286,"Rank":19,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.34,"AmazonReviewsClassification (zh)":47.23,"IFlyTek":49.6,"JDReview":86.72,"MassiveIntentClassification (zh-CN)":73.31,"MassiveScenarioClassification (zh-CN)":75.71,"MultilingualSentiment":76.48,"OnlineShopping":92.68,"TNews":51.98,"Waimai":88.37} -{"index":284,"Rank":20,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.26,"AmazonReviewsClassification (zh)":45.82,"IFlyTek":48.62,"JDReview":85.95,"MassiveIntentClassification (zh-CN)":73.32,"MassiveScenarioClassification (zh-CN)":76.79,"MultilingualSentiment":75.79,"OnlineShopping":93.36,"TNews":53.65,"Waimai":88.07} -{"index":206,"Rank":21,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.2,"AmazonReviewsClassification (zh)":46.72,"IFlyTek":49.74,"JDReview":86.74,"MassiveIntentClassification (zh-CN)":72.87,"MassiveScenarioClassification (zh-CN)":75.47,"MultilingualSentiment":76.28,"OnlineShopping":92.49,"TNews":52.16,"Waimai":88.36} -{"index":173,"Rank":22,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.12,"AmazonReviewsClassification (zh)":46.34,"IFlyTek":49.93,"JDReview":86.27,"MassiveIntentClassification (zh-CN)":72.05,"MassiveScenarioClassification (zh-CN)":75.3,"MultilingualSentiment":76.22,"OnlineShopping":93.53,"TNews":53.2,"Waimai":87.27} -{"index":16,"Rank":23,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.12,"AmazonReviewsClassification (zh)":47.21,"IFlyTek":44.85,"JDReview":85.82,"MassiveIntentClassification (zh-CN)":76.88,"MassiveScenarioClassification (zh-CN)":80.76,"MultilingualSentiment":74.46,"OnlineShopping":93.5,"TNews":49.95,"Waimai":86.63} -{"index":233,"Rank":24,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.12,"AmazonReviewsClassification (zh)":47.21,"IFlyTek":44.85,"JDReview":85.82,"MassiveIntentClassification (zh-CN)":76.88,"MassiveScenarioClassification (zh-CN)":80.76,"MultilingualSentiment":74.46,"OnlineShopping":93.5,"TNews":49.95,"Waimai":86.63} -{"index":208,"Rank":25,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.83,"AmazonReviewsClassification (zh)":46.67,"IFlyTek":47.34,"JDReview":85.67,"MassiveIntentClassification (zh-CN)":72.38,"MassiveScenarioClassification (zh-CN)":74.87,"MultilingualSentiment":76.27,"OnlineShopping":93.05,"TNews":53.27,"Waimai":87.96} -{"index":180,"Rank":26,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":70.17,"AmazonReviewsClassification (zh)":46.24,"IFlyTek":45.05,"JDReview":85.82,"MassiveIntentClassification (zh-CN)":74.99,"MassiveScenarioClassification (zh-CN)":79.93,"MultilingualSentiment":73.31,"OnlineShopping":92.19,"TNews":47.05,"Waimai":86.94} 
-{"index":46,"Rank":27,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.0,"AmazonReviewsClassification (zh)":44.93,"IFlyTek":48.3,"JDReview":85.07,"MassiveIntentClassification (zh-CN)":71.16,"MassiveScenarioClassification (zh-CN)":73.54,"MultilingualSentiment":75.16,"OnlineShopping":93.25,"TNews":52.41,"Waimai":86.21} -{"index":47,"Rank":28,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.89,"AmazonReviewsClassification (zh)":44.88,"IFlyTek":49.11,"JDReview":85.57,"MassiveIntentClassification (zh-CN)":70.78,"MassiveScenarioClassification (zh-CN)":73.16,"MultilingualSentiment":74.39,"OnlineShopping":93.12,"TNews":51.8,"Waimai":86.18} -{"index":27,"Rank":29,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":69.13,"AmazonReviewsClassification (zh)":41.38,"IFlyTek":48.74,"JDReview":85.14,"MassiveIntentClassification (zh-CN)":68.84,"MassiveScenarioClassification (zh-CN)":74.7,"MultilingualSentiment":72.97,"OnlineShopping":91.43,"TNews":52.1,"Waimai":86.9} -{"index":41,"Rank":30,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.09,"AmazonReviewsClassification (zh)":43.32,"IFlyTek":47.08,"JDReview":84.48,"MassiveIntentClassification (zh-CN)":70.91,"MassiveScenarioClassification (zh-CN)":74.94,"MultilingualSentiment":72.68,"OnlineShopping":92.11,"TNews":49.85,"Waimai":86.44} -{"index":20,"Rank":31,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.05,"AmazonReviewsClassification (zh)":40.81,"IFlyTek":48.01,"JDReview":87.02,"MassiveIntentClassification (zh-CN)":68.27,"MassiveScenarioClassification (zh-CN)":73.13,"MultilingualSentiment":73.4,"OnlineShopping":91.82,"TNews":51.93,"Waimai":87.1} -{"index":175,"Rank":32,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.05,"AmazonReviewsClassification (zh)":40.82,"IFlyTek":47.99,"JDReview":86.98,"MassiveIntentClassification (zh-CN)":68.26,"MassiveScenarioClassification (zh-CN)":73.12,"MultilingualSentiment":73.41,"OnlineShopping":91.81,"TNews":51.93,"Waimai":87.12} -{"index":21,"Rank":33,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.05,"AmazonReviewsClassification (zh)":40.8,"IFlyTek":47.99,"JDReview":87.02,"MassiveIntentClassification (zh-CN)":68.26,"MassiveScenarioClassification (zh-CN)":73.13,"MultilingualSentiment":73.39,"OnlineShopping":91.81,"TNews":51.93,"Waimai":87.1} -{"index":226,"Rank":34,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.83,"AmazonReviewsClassification (zh)":47.79,"IFlyTek":44.86,"JDReview":88.48,"MassiveIntentClassification (zh-CN)":61.29,"MassiveScenarioClassification (zh-CN)":66.9,"MultilingualSentiment":78.46,"OnlineShopping":92.58,"TNews":50.02,"Waimai":89.09} -{"index":26,"Rank":35,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":68.58,"AmazonReviewsClassification (zh)":41.94,"IFlyTek":45.32,"JDReview":85.38,"MassiveIntentClassification (zh-CN)":66.96,"MassiveScenarioClassification (zh-CN)":73.39,"MultilingualSentiment":73.7,"OnlineShopping":91.66,"TNews":52.05,"Waimai":86.83} -{"index":172,"Rank":36,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":68.29,"AmazonReviewsClassification (zh)":39.64,"IFlyTek":47.9,"JDReview":84.78,"MassiveIntentClassification (zh-CN)":68.09,"MassiveScenarioClassification (zh-CN)":73.22,"MultilingualSentiment":71.67,"OnlineShopping":91.38,"TNews":51.25,"Waimai":86.68} -{"index":51,"Rank":37,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.21,"AmazonReviewsClassification (zh)":39.32,"IFlyTek":47.96,"JDReview":84.9,"MassiveIntentClassification (zh-CN)":67.65,"MassiveScenarioClassification (zh-CN)":72.97,"MultilingualSentiment":71.54,"OnlineShopping":91.3,"TNews":51.56,"Waimai":86.66} -{"index":221,"Rank":38,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":68.2,"AmazonReviewsClassification (zh)":44.44,"IFlyTek":43.96,"JDReview":86.92,"MassiveIntentClassification (zh-CN)":67.23,"MassiveScenarioClassification (zh-CN)":74.88,"MultilingualSentiment":72.47,"OnlineShopping":89.59,"TNews":48.26,"Waimai":86.08} -{"index":23,"Rank":39,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":68.07,"AmazonReviewsClassification (zh)":40.15,"IFlyTek":48.62,"JDReview":83.62,"MassiveIntentClassification (zh-CN)":67.93,"MassiveScenarioClassification (zh-CN)":73.98,"MultilingualSentiment":70.67,"OnlineShopping":91.26,"TNews":51.08,"Waimai":85.36} -{"index":40,"Rank":40,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.89,"AmazonReviewsClassification (zh)":42.04,"IFlyTek":46.3,"JDReview":77.3,"MassiveIntentClassification (zh-CN)":70.82,"MassiveScenarioClassification (zh-CN)":75.21,"MultilingualSentiment":71.67,"OnlineShopping":91.53,"TNews":50.17,"Waimai":85.98} -{"index":171,"Rank":41,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.77,"AmazonReviewsClassification (zh)":40.25,"IFlyTek":47.46,"JDReview":84.99,"MassiveIntentClassification (zh-CN)":65.56,"MassiveScenarioClassification (zh-CN)":71.49,"MultilingualSentiment":72.48,"OnlineShopping":90.34,"TNews":50.78,"Waimai":86.62} -{"index":174,"Rank":42,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.62,"AmazonReviewsClassification (zh)":40.57,"IFlyTek":47.03,"JDReview":86.74,"MassiveIntentClassification (zh-CN)":64.02,"MassiveScenarioClassification (zh-CN)":68.95,"MultilingualSentiment":73.03,"OnlineShopping":90.75,"TNews":50.69,"Waimai":86.77} -{"index":220,"Rank":43,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":67.52,"AmazonReviewsClassification (zh)":43.02,"IFlyTek":44.42,"JDReview":85.33,"MassiveIntentClassification (zh-CN)":68.4,"MassiveScenarioClassification (zh-CN)":74.6,"MultilingualSentiment":71.9,"OnlineShopping":87.77,"TNews":48.28,"Waimai":83.99} -{"index":184,"Rank":44,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":67.34,"AmazonReviewsClassification (zh)":38.83,"IFlyTek":45.47,"JDReview":80.99,"MassiveIntentClassification (zh-CN)":71.12,"MassiveScenarioClassification (zh-CN)":76.83,"MultilingualSentiment":68.58,"OnlineShopping":90.81,"TNews":48.38,"Waimai":85.02} -{"index":39,"Rank":45,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.08,"AmazonReviewsClassification 
(zh)":39.68,"IFlyTek":47.0,"JDReview":77.34,"MassiveIntentClassification (zh-CN)":70.98,"MassiveScenarioClassification (zh-CN)":75.69,"MultilingualSentiment":69.17,"OnlineShopping":89.79,"TNews":49.77,"Waimai":84.34} -{"index":252,"Rank":46,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"AmazonReviewsClassification (zh)":40.33,"IFlyTek":44.25,"JDReview":86.1,"MassiveIntentClassification (zh-CN)":68.0,"MassiveScenarioClassification (zh-CN)":72.08,"MultilingualSentiment":70.15,"OnlineShopping":90.27,"TNews":46.54,"Waimai":85.54} -{"index":251,"Rank":47,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.98,"AmazonReviewsClassification (zh)":40.24,"IFlyTek":44.35,"JDReview":84.26,"MassiveIntentClassification (zh-CN)":68.97,"MassiveScenarioClassification (zh-CN)":73.32,"MultilingualSentiment":70.25,"OnlineShopping":89.93,"TNews":46.81,"Waimai":84.7} -{"index":186,"Rank":48,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.85,"AmazonReviewsClassification (zh)":37.5,"IFlyTek":47.35,"JDReview":79.34,"MassiveIntentClassification (zh-CN)":68.24,"MassiveScenarioClassification (zh-CN)":74.47,"MultilingualSentiment":64.74,"OnlineShopping":88.73,"TNews":48.38,"Waimai":83.9} -{"index":183,"Rank":49,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.35,"AmazonReviewsClassification (zh)":37.23,"IFlyTek":44.93,"JDReview":76.21,"MassiveIntentClassification (zh-CN)":69.16,"MassiveScenarioClassification (zh-CN)":75.42,"MultilingualSentiment":65.28,"OnlineShopping":88.4,"TNews":47.06,"Waimai":84.42} -{"index":188,"Rank":50,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.31,"AmazonReviewsClassification (zh)":38.12,"IFlyTek":43.52,"JDReview":81.09,"MassiveIntentClassification (zh-CN)":66.91,"MassiveScenarioClassification (zh-CN)":74.72,"MultilingualSentiment":66.33,"OnlineShopping":88.35,"TNews":46.08,"Waimai":82.67} -{"index":280,"Rank":51,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.94,"AmazonReviewsClassification (zh)":34.94,"IFlyTek":47.36,"JDReview":79.57,"MassiveIntentClassification (zh-CN)":68.2,"MassiveScenarioClassification (zh-CN)":71.93,"MultilingualSentiment":63.29,"OnlineShopping":87.0,"TNews":47.65,"Waimai":84.54} -{"index":199,"Rank":52,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.94,"AmazonReviewsClassification (zh)":34.94,"IFlyTek":47.36,"JDReview":79.57,"MassiveIntentClassification (zh-CN)":68.2,"MassiveScenarioClassification (zh-CN)":71.93,"MultilingualSentiment":63.29,"OnlineShopping":87.0,"TNews":47.65,"Waimai":84.54} -{"index":288,"Rank":53,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.49,"AmazonReviewsClassification (zh)":38.69,"IFlyTek":41.15,"JDReview":82.83,"MassiveIntentClassification (zh-CN)":59.28,"MassiveScenarioClassification (zh-CN)":66.48,"MultilingualSentiment":68.2,"OnlineShopping":89.13,"TNews":49.65,"Waimai":84.96} -{"index":274,"Rank":54,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.37,"AmazonReviewsClassification (zh)":29.75,"IFlyTek":50.93,"JDReview":84.93,"MassiveIntentClassification 
(zh-CN)":67.46,"MassiveScenarioClassification (zh-CN)":71.6,"MultilingualSentiment":55.9,"OnlineShopping":82.53,"TNews":55.08,"Waimai":81.16} -{"index":311,"Rank":55,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.31,"AmazonReviewsClassification (zh)":38.3,"IFlyTek":44.62,"JDReview":74.6,"MassiveIntentClassification (zh-CN)":64.81,"MassiveScenarioClassification (zh-CN)":71.4,"MultilingualSentiment":67.99,"OnlineShopping":88.94,"TNews":45.77,"Waimai":82.37} -{"index":33,"Rank":56,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":63.96,"AmazonReviewsClassification (zh)":35.91,"IFlyTek":45.49,"JDReview":80.04,"MassiveIntentClassification (zh-CN)":63.95,"MassiveScenarioClassification (zh-CN)":70.8,"MultilingualSentiment":63.06,"OnlineShopping":85.05,"TNews":48.15,"Waimai":83.18} -{"index":190,"Rank":57,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.85,"AmazonReviewsClassification (zh)":35.7,"IFlyTek":40.46,"JDReview":78.26,"MassiveIntentClassification (zh-CN)":63.75,"MassiveScenarioClassification (zh-CN)":72.39,"MultilingualSentiment":63.17,"OnlineShopping":87.11,"TNews":44.15,"Waimai":80.65} -{"index":49,"Rank":58,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":62.7,"AmazonReviewsClassification (zh)":38.25,"IFlyTek":43.13,"JDReview":69.08,"MassiveIntentClassification (zh-CN)":61.23,"MassiveScenarioClassification (zh-CN)":68.12,"MultilingualSentiment":67.83,"OnlineShopping":88.13,"TNews":44.42,"Waimai":84.15} -{"index":48,"Rank":59,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.54,"AmazonReviewsClassification (zh)":37.51,"IFlyTek":44.88,"JDReview":82.2,"MassiveIntentClassification (zh-CN)":57.34,"MassiveScenarioClassification (zh-CN)":62.36,"MultilingualSentiment":66.58,"OnlineShopping":88.19,"TNews":39.8,"Waimai":83.96} -{"index":277,"Rank":60,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":62.19,"AmazonReviewsClassification (zh)":34.12,"IFlyTek":42.05,"JDReview":82.14,"MassiveIntentClassification (zh-CN)":63.98,"MassiveScenarioClassification (zh-CN)":70.52,"MultilingualSentiment":60.98,"OnlineShopping":85.69,"TNews":43.01,"Waimai":77.22} -{"index":279,"Rank":61,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":61.0,"AmazonReviewsClassification (zh)":34.46,"IFlyTek":41.75,"JDReview":79.68,"MassiveIntentClassification (zh-CN)":57.47,"MassiveScenarioClassification (zh-CN)":65.32,"MultilingualSentiment":61.21,"OnlineShopping":84.3,"TNews":45.22,"Waimai":79.57} -{"index":53,"Rank":62,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":60.66,"AmazonReviewsClassification (zh)":33.77,"IFlyTek":41.54,"JDReview":81.56,"MassiveIntentClassification (zh-CN)":63.23,"MassiveScenarioClassification (zh-CN)":68.45,"MultilingualSentiment":58.97,"OnlineShopping":83.51,"TNews":38.92,"Waimai":76.01} -{"index":159,"Rank":63,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.87,"AmazonReviewsClassification (zh)":31.91,"IFlyTek":38.01,"JDReview":69.59,"MassiveIntentClassification (zh-CN)":62.08,"MassiveScenarioClassification 
(zh-CN)":68.88,"MultilingualSentiment":57.69,"OnlineShopping":75.64,"TNews":40.95,"Waimai":76.12} -{"index":289,"Rank":64,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.04,"AmazonReviewsClassification (zh)":21.96,"IFlyTek":20.35,"JDReview":55.5,"MassiveIntentClassification (zh-CN)":25.39,"MassiveScenarioClassification (zh-CN)":40.35,"MultilingualSentiment":38.94,"OnlineShopping":56.89,"TNews":26.79,"Waimai":56.22} -{"index":11,"Rank":76,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AmazonReviewsClassification (zh)":30.89,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":40.62,"MassiveScenarioClassification (zh-CN)":50.22,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":67,"Rank":110,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":22.35,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":18.85,"MassiveScenarioClassification (zh-CN)":30.14,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":91,"Rank":134,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":24.27,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":46.24,"MassiveScenarioClassification (zh-CN)":49.38,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":92,"Rank":135,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":23.98,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":45.78,"MassiveScenarioClassification (zh-CN)":48.55,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":95,"Rank":138,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":33.75,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":126,"Rank":167,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":36.07,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":63.4,"MassiveScenarioClassification (zh-CN)":69.58,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":138,"Rank":178,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":37.63,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":139,"Rank":179,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":32.63,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":67.07,"MassiveScenarioClassification (zh-CN)":73.95,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":185,"Rank":212,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":44.66,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":73.32,"MassiveScenarioClassification (zh-CN)":78.7,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":189,"Rank":214,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":"","IFlyTek":45.03,"JDReview":83.64,"MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":67.17,"OnlineShopping":90.41,"TNews":47.7,"Waimai":84.07} -{"index":191,"Rank":215,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":"","IFlyTek":46.23,"JDReview":84.17,"MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":68.36,"OnlineShopping":90.93,"TNews":47.8,"Waimai":84.68} -{"index":254,"Rank":266,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AmazonReviewsClassification (zh)":36.45,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":63.86,"MassiveScenarioClassification (zh-CN)":70.85,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":255,"Rank":267,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AmazonReviewsClassification (zh)":22.99,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":23.74,"MassiveScenarioClassification (zh-CN)":33.18,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":258,"Rank":270,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonReviewsClassification (zh)":20.49,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":2.81,"MassiveScenarioClassification (zh-CN)":9.19,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":261,"Rank":273,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AmazonReviewsClassification (zh)":33.89,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":59.22,"MassiveScenarioClassification (zh-CN)":66.44,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":263,"Rank":275,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonReviewsClassification (zh)":21.83,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":2.41,"MassiveScenarioClassification (zh-CN)":3.84,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":264,"Rank":276,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AmazonReviewsClassification (zh)":21.89,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":1.72,"MassiveScenarioClassification (zh-CN)":5.21,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":268,"Rank":280,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonReviewsClassification (zh)":35.26,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification 
(zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":269,"Rank":281,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonReviewsClassification (zh)":37.74,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":270,"Rank":282,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonReviewsClassification (zh)":21.53,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":1.12,"MassiveScenarioClassification (zh-CN)":4.17,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":271,"Rank":283,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonReviewsClassification (zh)":22.12,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":1.09,"MassiveScenarioClassification (zh-CN)":4.7,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":272,"Rank":284,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AmazonReviewsClassification (zh)":21.88,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":0.59,"MassiveScenarioClassification (zh-CN)":5.86,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":275,"Rank":286,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":"","IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":5.15,"MassiveScenarioClassification (zh-CN)":10.56,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"index":278,"Rank":287,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":32.52,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":60.86,"MassiveScenarioClassification (zh-CN)":65.83,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.83,"IFlyTek":45.47,"IFlyTek (cmn-Hans)":41.86,"JDReview":80.99,"JDReview (cmn-Hans)":80.54,"MultilingualSentiment":68.58,"MultilingualSentiment (cmn-Hans)":70.81,"OnlineShopping":90.81,"OnlineShopping (cmn-Hans)":90.45,"TNews":48.38,"TNews (cmn-Hans)":48.8,"Waimai":85.02,"Waimai (cmn-Hans)":86.3} +{"Rank":2,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":68.08,"IFlyTek":47.35,"IFlyTek (cmn-Hans)":40.74,"JDReview":79.34,"JDReview (cmn-Hans)":78.37,"MultilingualSentiment":64.74,"MultilingualSentiment (cmn-Hans)":66.0,"OnlineShopping":88.73,"OnlineShopping (cmn-Hans)":88.7,"TNews":48.38,"TNews (cmn-Hans)":46.6,"Waimai":83.9,"Waimai (cmn-Hans)":84.15} +{"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.71,"IFlyTek":44.93,"IFlyTek (cmn-Hans)":40.81,"JDReview":76.21,"JDReview (cmn-Hans)":75.72,"MultilingualSentiment":65.28,"MultilingualSentiment (cmn-Hans)":67.56,"OnlineShopping":88.4,"OnlineShopping (cmn-Hans)":88.66,"TNews":47.06,"TNews 
(cmn-Hans)":47.52,"Waimai":84.42,"Waimai (cmn-Hans)":85.98} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek 
(cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":51.8,"IFlyTek (cmn-Hans)":null,"JDReview":86.02,"JDReview (cmn-Hans)":null,"MultilingualSentiment":76.35,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":93.2,"OnlineShopping (cmn-Hans)":null,"TNews":53.06,"TNews (cmn-Hans)":null,"Waimai":88.1,"Waimai (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"IFlyTek":53.77,"IFlyTek (cmn-Hans)":null,"JDReview":88.2,"JDReview (cmn-Hans)":null,"MultilingualSentiment":77.42,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":94.48,"OnlineShopping (cmn-Hans)":null,"TNews":51.24,"TNews (cmn-Hans)":null,"Waimai":88.63,"Waimai (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":48.62,"IFlyTek (cmn-Hans)":null,"JDReview":83.62,"JDReview (cmn-Hans)":null,"MultilingualSentiment":70.67,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":91.26,"OnlineShopping (cmn-Hans)":null,"TNews":51.08,"TNews (cmn-Hans)":null,"Waimai":85.36,"Waimai (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"IFlyTek":45.32,"IFlyTek (cmn-Hans)":null,"JDReview":85.38,"JDReview (cmn-Hans)":null,"MultilingualSentiment":73.7,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":91.66,"OnlineShopping (cmn-Hans)":null,"TNews":52.05,"TNews (cmn-Hans)":null,"Waimai":86.83,"Waimai (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"IFlyTek":48.74,"IFlyTek (cmn-Hans)":null,"JDReview":85.14,"JDReview (cmn-Hans)":null,"MultilingualSentiment":72.97,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":91.43,"OnlineShopping (cmn-Hans)":null,"TNews":52.1,"TNews 
(cmn-Hans)":null,"Waimai":86.9,"Waimai (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":null,"IFlyTek":45.49,"IFlyTek (cmn-Hans)":null,"JDReview":80.04,"JDReview (cmn-Hans)":null,"MultilingualSentiment":63.06,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":85.05,"OnlineShopping (cmn-Hans)":null,"TNews":48.15,"TNews (cmn-Hans)":null,"Waimai":83.18,"Waimai (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"IFlyTek":41.54,"IFlyTek (cmn-Hans)":null,"JDReview":81.56,"JDReview (cmn-Hans)":null,"MultilingualSentiment":58.97,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":83.51,"OnlineShopping (cmn-Hans)":null,"TNews":38.92,"TNews (cmn-Hans)":null,"Waimai":76.01,"Waimai (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":48.49,"JDReview":null,"JDReview (cmn-Hans)":84.02,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":68.13,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":86.99,"TNews":null,"TNews (cmn-Hans)":49.94,"Waimai":null,"Waimai (cmn-Hans)":84.92} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment 
(cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} 
+{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview 
(cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} 
+{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":44.42,"IFlyTek (cmn-Hans)":null,"JDReview":85.33,"JDReview (cmn-Hans)":null,"MultilingualSentiment":71.9,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":87.77,"OnlineShopping (cmn-Hans)":null,"TNews":48.28,"TNews (cmn-Hans)":null,"Waimai":83.99,"Waimai (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":43.96,"IFlyTek (cmn-Hans)":null,"JDReview":86.92,"JDReview (cmn-Hans)":null,"MultilingualSentiment":72.47,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":89.59,"OnlineShopping (cmn-Hans)":null,"TNews":48.26,"TNews (cmn-Hans)":null,"Waimai":86.08,"Waimai (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview 
(cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":43.19,"JDReview":null,"JDReview (cmn-Hans)":79.14,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":64.6,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":85.63,"TNews":null,"TNews (cmn-Hans)":46.02,"Waimai":null,"Waimai (cmn-Hans)":82.85} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":15.31,"JDReview":null,"JDReview (cmn-Hans)":59.57,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":40.52,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":58.65,"TNews":null,"TNews (cmn-Hans)":20.37,"Waimai":null,"Waimai (cmn-Hans)":63.48} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":16.09,"JDReview":null,"JDReview (cmn-Hans)":59.98,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":41.28,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":57.74,"TNews":null,"TNews (cmn-Hans)":20.12,"Waimai":null,"Waimai (cmn-Hans)":62.72} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":17.18,"JDReview":null,"JDReview (cmn-Hans)":60.19,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":41.2,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":56.94,"TNews":null,"TNews (cmn-Hans)":21.05,"Waimai":null,"Waimai (cmn-Hans)":63.31} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} 
+{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":39.88,"JDReview":null,"JDReview 
(cmn-Hans)":70.26,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":61.9,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":84.89,"TNews":null,"TNews (cmn-Hans)":39.19,"Waimai":null,"Waimai (cmn-Hans)":82.27} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":43.98,"JDReview":null,"JDReview (cmn-Hans)":70.34,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":66.49,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":87.75,"TNews":null,"TNews (cmn-Hans)":43.73,"Waimai":null,"Waimai (cmn-Hans)":83.97} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":42.05,"IFlyTek (cmn-Hans)":null,"JDReview":82.14,"JDReview (cmn-Hans)":null,"MultilingualSentiment":60.98,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":85.69,"OnlineShopping (cmn-Hans)":null,"TNews":43.01,"TNews (cmn-Hans)":null,"Waimai":77.22,"Waimai 
(cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"IFlyTek":41.75,"IFlyTek (cmn-Hans)":null,"JDReview":79.68,"JDReview (cmn-Hans)":null,"MultilingualSentiment":61.21,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":84.3,"OnlineShopping (cmn-Hans)":null,"TNews":45.22,"TNews (cmn-Hans)":null,"Waimai":79.57,"Waimai (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":44.62,"IFlyTek (cmn-Hans)":null,"JDReview":74.6,"JDReview (cmn-Hans)":null,"MultilingualSentiment":67.99,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":88.94,"OnlineShopping (cmn-Hans)":null,"TNews":45.77,"TNews (cmn-Hans)":null,"Waimai":82.37,"Waimai (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":50.75,"IFlyTek (cmn-Hans)":null,"JDReview":87.69,"JDReview (cmn-Hans)":null,"MultilingualSentiment":76.83,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":94.42,"OnlineShopping (cmn-Hans)":null,"TNews":52.62,"TNews (cmn-Hans)":null,"Waimai":88.77,"Waimai (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} diff --git a/all_data_tasks/9/default.jsonl b/all_data_tasks/9/default.jsonl index edce06141a501c9548c353224215b9e2347a7d87..4b0f6a34ef3968172fc3bc47dced6c1423bb7bfe 100644 --- a/all_data_tasks/9/default.jsonl +++ b/all_data_tasks/9/default.jsonl @@ -1,66 +1,112 @@ 
-{"index":15,"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":67.08,"CLSClusteringP2P":47.21,"CLSClusteringS2S":45.79,"ThuNewsClusteringP2P":87.43,"ThuNewsClusteringS2S":87.9} -{"index":276,"Rank":2,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.64,"CLSClusteringP2P":57.39,"CLSClusteringS2S":54.11,"ThuNewsClusteringP2P":79.15,"ThuNewsClusteringS2S":75.9} -{"index":50,"Rank":3,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.35,"CLSClusteringP2P":60.2,"CLSClusteringS2S":58.4,"ThuNewsClusteringP2P":76.98,"ThuNewsClusteringS2S":69.83} -{"index":17,"Rank":4,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":66.06,"CLSClusteringP2P":47.07,"CLSClusteringS2S":45.99,"ThuNewsClusteringP2P":86.08,"ThuNewsClusteringS2S":85.11} -{"index":142,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.06,"CLSClusteringP2P":47.07,"CLSClusteringS2S":45.99,"ThuNewsClusteringP2P":86.08,"ThuNewsClusteringS2S":85.11} -{"index":234,"Rank":6,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.06,"CLSClusteringP2P":47.07,"CLSClusteringS2S":45.99,"ThuNewsClusteringP2P":86.08,"ThuNewsClusteringS2S":85.11} -{"index":207,"Rank":7,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.17,"CLSClusteringP2P":60.42,"CLSClusteringS2S":49.54,"ThuNewsClusteringP2P":78.76,"ThuNewsClusteringS2S":71.96} -{"index":169,"Rank":8,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.23,"CLSClusteringP2P":58.29,"CLSClusteringS2S":48.79,"ThuNewsClusteringP2P":72.48,"ThuNewsClusteringS2S":69.35} -{"index":253,"Rank":9,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.17,"CLSClusteringP2P":57.93,"CLSClusteringS2S":48.1,"ThuNewsClusteringP2P":74.2,"ThuNewsClusteringS2S":68.43} -{"index":38,"Rank":10,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.98,"CLSClusteringP2P":56.35,"CLSClusteringS2S":48.49,"ThuNewsClusteringP2P":74.55,"ThuNewsClusteringS2S":68.55} -{"index":29,"Rank":11,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.3,"CLSClusteringP2P":54.65,"CLSClusteringS2S":63.68,"ThuNewsClusteringP2P":64.32,"ThuNewsClusteringS2S":54.57} -{"index":129,"Rank":12,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.7,"CLSClusteringP2P":47.08,"CLSClusteringS2S":44.05,"ThuNewsClusteringP2P":74.66,"ThuNewsClusteringS2S":69.0} -{"index":315,"Rank":13,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.88,"CLSClusteringP2P":60.37,"CLSClusteringS2S":51.09,"ThuNewsClusteringP2P":58.23,"ThuNewsClusteringS2S":57.83} -{"index":206,"Rank":14,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.62,"CLSClusteringP2P":43.24,"CLSClusteringS2S":41.23,"ThuNewsClusteringP2P":70.06,"ThuNewsClusteringS2S":63.94} 
-{"index":16,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.61,"CLSClusteringP2P":45.21,"CLSClusteringS2S":42.5,"ThuNewsClusteringP2P":68.24,"ThuNewsClusteringS2S":62.5} -{"index":233,"Rank":16,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.61,"CLSClusteringP2P":45.21,"CLSClusteringS2S":42.5,"ThuNewsClusteringP2P":68.24,"ThuNewsClusteringS2S":62.5} -{"index":116,"Rank":17,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"CLSClusteringP2P":43.03,"CLSClusteringS2S":40.42,"ThuNewsClusteringP2P":70.6,"ThuNewsClusteringS2S":63.21} -{"index":155,"Rank":18,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"CLSClusteringP2P":43.03,"CLSClusteringS2S":40.42,"ThuNewsClusteringP2P":70.6,"ThuNewsClusteringS2S":63.21} -{"index":105,"Rank":19,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.0,"CLSClusteringP2P":42.52,"CLSClusteringS2S":39.73,"ThuNewsClusteringP2P":70.1,"ThuNewsClusteringS2S":63.67} -{"index":154,"Rank":20,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.9,"CLSClusteringP2P":41.94,"CLSClusteringS2S":40.34,"ThuNewsClusteringP2P":69.61,"ThuNewsClusteringS2S":63.71} -{"index":284,"Rank":21,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.86,"CLSClusteringP2P":42.58,"CLSClusteringS2S":40.42,"ThuNewsClusteringP2P":68.81,"ThuNewsClusteringS2S":63.61} -{"index":12,"Rank":22,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.75,"CLSClusteringP2P":41.64,"CLSClusteringS2S":40.33,"ThuNewsClusteringP2P":69.28,"ThuNewsClusteringS2S":63.75} -{"index":173,"Rank":23,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.3,"CLSClusteringP2P":42.86,"CLSClusteringS2S":39.98,"ThuNewsClusteringP2P":67.88,"ThuNewsClusteringS2S":62.47} -{"index":286,"Rank":24,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.07,"CLSClusteringP2P":42.1,"CLSClusteringS2S":38.91,"ThuNewsClusteringP2P":68.36,"ThuNewsClusteringS2S":62.92} -{"index":180,"Rank":25,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":52.3,"CLSClusteringP2P":44.42,"CLSClusteringS2S":42.58,"ThuNewsClusteringP2P":64.68,"ThuNewsClusteringS2S":57.53} -{"index":46,"Rank":26,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.96,"CLSClusteringP2P":40.24,"CLSClusteringS2S":39.17,"ThuNewsClusteringP2P":65.58,"ThuNewsClusteringS2S":58.84} -{"index":47,"Rank":27,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.8,"CLSClusteringP2P":40.79,"CLSClusteringS2S":38.43,"ThuNewsClusteringP2P":65.15,"ThuNewsClusteringS2S":58.82} -{"index":26,"Rank":28,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":50.01,"CLSClusteringP2P":41.23,"CLSClusteringS2S":40.04,"ThuNewsClusteringP2P":62.03,"ThuNewsClusteringS2S":56.75} 
-{"index":51,"Rank":29,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.48,"CLSClusteringP2P":39.51,"CLSClusteringS2S":38.0,"ThuNewsClusteringP2P":62.57,"ThuNewsClusteringS2S":57.84} -{"index":172,"Rank":30,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.4,"CLSClusteringP2P":40.26,"CLSClusteringS2S":37.18,"ThuNewsClusteringP2P":62.19,"ThuNewsClusteringS2S":57.97} -{"index":175,"Rank":31,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.16,"CLSClusteringP2P":39.95,"CLSClusteringS2S":38.18,"ThuNewsClusteringP2P":61.4,"ThuNewsClusteringS2S":57.11} -{"index":21,"Rank":32,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.04,"CLSClusteringP2P":40.03,"CLSClusteringS2S":38.19,"ThuNewsClusteringP2P":60.85,"ThuNewsClusteringS2S":57.09} -{"index":20,"Rank":33,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.0,"CLSClusteringP2P":39.96,"CLSClusteringS2S":38.19,"ThuNewsClusteringP2P":60.74,"ThuNewsClusteringS2S":57.09} -{"index":27,"Rank":34,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":48.99,"CLSClusteringP2P":41.44,"CLSClusteringS2S":38.33,"ThuNewsClusteringP2P":59.61,"ThuNewsClusteringS2S":56.58} -{"index":288,"Rank":35,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.95,"CLSClusteringP2P":39.54,"CLSClusteringS2S":37.33,"ThuNewsClusteringP2P":63.79,"ThuNewsClusteringS2S":55.14} -{"index":41,"Rank":36,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.88,"CLSClusteringP2P":42.46,"CLSClusteringS2S":40.38,"ThuNewsClusteringP2P":58.54,"ThuNewsClusteringS2S":54.15} -{"index":221,"Rank":37,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":48.88,"CLSClusteringP2P":38.6,"CLSClusteringS2S":38.02,"ThuNewsClusteringP2P":60.39,"ThuNewsClusteringS2S":58.51} -{"index":40,"Rank":38,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.81,"CLSClusteringP2P":42.84,"CLSClusteringS2S":39.77,"ThuNewsClusteringP2P":58.93,"ThuNewsClusteringS2S":53.71} -{"index":171,"Rank":39,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.7,"CLSClusteringP2P":39.71,"CLSClusteringS2S":36.56,"ThuNewsClusteringP2P":63.19,"ThuNewsClusteringS2S":55.36} -{"index":174,"Rank":40,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.65,"CLSClusteringP2P":40.27,"CLSClusteringS2S":37.5,"ThuNewsClusteringP2P":62.15,"ThuNewsClusteringS2S":54.67} -{"index":226,"Rank":41,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.64,"CLSClusteringP2P":39.32,"CLSClusteringS2S":37.84,"ThuNewsClusteringP2P":60.69,"ThuNewsClusteringS2S":56.73} -{"index":208,"Rank":42,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.35,"CLSClusteringP2P":33.55,"CLSClusteringS2S":36.18,"ThuNewsClusteringP2P":63.34,"ThuNewsClusteringS2S":60.33} -{"index":184,"Rank":43,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, 
fp32)":2.09,"Average":48.23,"CLSClusteringP2P":40.68,"CLSClusteringS2S":38.59,"ThuNewsClusteringP2P":58.05,"ThuNewsClusteringS2S":55.59} -{"index":274,"Rank":44,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.05,"CLSClusteringP2P":38.88,"CLSClusteringS2S":39.09,"ThuNewsClusteringP2P":59.51,"ThuNewsClusteringS2S":54.72} -{"index":220,"Rank":45,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.68,"CLSClusteringP2P":39.81,"CLSClusteringS2S":37.34,"ThuNewsClusteringP2P":59.77,"ThuNewsClusteringS2S":53.78} -{"index":23,"Rank":46,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.53,"CLSClusteringP2P":39.91,"CLSClusteringS2S":37.63,"ThuNewsClusteringP2P":58.45,"ThuNewsClusteringS2S":54.12} -{"index":39,"Rank":47,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.21,"CLSClusteringP2P":42.21,"CLSClusteringS2S":40.33,"ThuNewsClusteringP2P":55.81,"ThuNewsClusteringS2S":50.5} -{"index":251,"Rank":48,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.12,"CLSClusteringP2P":38.36,"CLSClusteringS2S":35.65,"ThuNewsClusteringP2P":61.44,"ThuNewsClusteringS2S":53.02} -{"index":252,"Rank":49,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.04,"CLSClusteringP2P":38.98,"CLSClusteringS2S":36.04,"ThuNewsClusteringP2P":60.58,"ThuNewsClusteringS2S":52.56} -{"index":199,"Rank":50,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.47,"CLSClusteringP2P":39.97,"CLSClusteringS2S":38.4,"ThuNewsClusteringP2P":54.08,"ThuNewsClusteringS2S":53.42} -{"index":280,"Rank":51,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.47,"CLSClusteringP2P":39.97,"CLSClusteringS2S":38.4,"ThuNewsClusteringP2P":54.08,"ThuNewsClusteringS2S":53.42} -{"index":311,"Rank":52,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.68,"CLSClusteringP2P":38.26,"CLSClusteringS2S":35.91,"ThuNewsClusteringP2P":58.71,"ThuNewsClusteringS2S":49.86} -{"index":186,"Rank":53,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":45.26,"CLSClusteringP2P":39.14,"CLSClusteringS2S":37.79,"ThuNewsClusteringP2P":55.18,"ThuNewsClusteringS2S":48.93} -{"index":279,"Rank":54,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":44.39,"CLSClusteringP2P":37.01,"CLSClusteringS2S":33.46,"ThuNewsClusteringP2P":58.83,"ThuNewsClusteringS2S":48.26} -{"index":33,"Rank":55,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":44.18,"CLSClusteringP2P":38.14,"CLSClusteringS2S":35.14,"ThuNewsClusteringP2P":54.22,"ThuNewsClusteringS2S":49.22} -{"index":189,"Rank":56,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.24,"CLSClusteringP2P":36.73,"CLSClusteringS2S":36.45,"ThuNewsClusteringP2P":50.24,"ThuNewsClusteringS2S":49.54} -{"index":190,"Rank":57,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":42.42,"CLSClusteringP2P":35.21,"CLSClusteringS2S":35.12,"ThuNewsClusteringP2P":51.03,"ThuNewsClusteringS2S":48.32} -{"index":188,"Rank":58,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.42,"CLSClusteringP2P":35.36,"CLSClusteringS2S":37.07,"ThuNewsClusteringP2P":48.91,"ThuNewsClusteringS2S":48.34} -{"index":191,"Rank":59,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.32,"CLSClusteringP2P":36.0,"CLSClusteringS2S":36.35,"ThuNewsClusteringP2P":49.19,"ThuNewsClusteringS2S":47.76} -{"index":183,"Rank":60,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":40.68,"CLSClusteringP2P":32.41,"CLSClusteringS2S":36.99,"ThuNewsClusteringP2P":40.98,"ThuNewsClusteringS2S":52.36} -{"index":49,"Rank":61,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.67,"CLSClusteringP2P":39.24,"CLSClusteringS2S":35.36,"ThuNewsClusteringP2P":47.3,"ThuNewsClusteringS2S":36.77} -{"index":277,"Rank":62,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":37.66,"CLSClusteringP2P":35.27,"CLSClusteringS2S":32.42,"ThuNewsClusteringP2P":42.92,"ThuNewsClusteringS2S":40.01} -{"index":159,"Rank":63,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.44,"CLSClusteringP2P":27.43,"CLSClusteringS2S":28.32,"ThuNewsClusteringP2P":38.54,"ThuNewsClusteringS2S":43.45} -{"index":48,"Rank":64,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.8,"CLSClusteringP2P":34.98,"CLSClusteringS2S":27.82,"ThuNewsClusteringP2P":40.17,"ThuNewsClusteringS2S":24.23} -{"index":53,"Rank":65,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":30.02,"CLSClusteringP2P":30.13,"CLSClusteringS2S":28.77,"ThuNewsClusteringP2P":35.05,"ThuNewsClusteringS2S":26.14} -{"index":289,"Rank":66,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.34,"CLSClusteringP2P":18.66,"CLSClusteringS2S":16.82,"ThuNewsClusteringP2P":20.69,"ThuNewsClusteringS2S":17.19} +{"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":67.08,"CLSClusteringP2P":47.21,"CLSClusteringS2S":45.79,"ThuNewsClusteringP2P":87.43,"ThuNewsClusteringS2S":87.9} +{"Rank":2,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.88,"CLSClusteringP2P":60.37,"CLSClusteringS2S":51.09,"ThuNewsClusteringP2P":58.23,"ThuNewsClusteringS2S":57.83} +{"Rank":3,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.75,"CLSClusteringP2P":41.64,"CLSClusteringS2S":40.33,"ThuNewsClusteringP2P":69.28,"ThuNewsClusteringS2S":63.75} +{"Rank":4,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":50.01,"CLSClusteringP2P":41.23,"CLSClusteringS2S":40.04,"ThuNewsClusteringP2P":62.03,"ThuNewsClusteringS2S":56.75} +{"Rank":5,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, 
fp32)":1.21,"Average":48.99,"CLSClusteringP2P":41.44,"CLSClusteringS2S":38.33,"ThuNewsClusteringP2P":59.61,"ThuNewsClusteringS2S":56.58} +{"Rank":6,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":48.88,"CLSClusteringP2P":38.6,"CLSClusteringS2S":38.02,"ThuNewsClusteringP2P":60.39,"ThuNewsClusteringS2S":58.51} +{"Rank":7,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.23,"CLSClusteringP2P":40.68,"CLSClusteringS2S":38.59,"ThuNewsClusteringP2P":58.05,"ThuNewsClusteringS2S":55.59} +{"Rank":8,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.68,"CLSClusteringP2P":39.81,"CLSClusteringS2S":37.34,"ThuNewsClusteringP2P":59.77,"ThuNewsClusteringS2S":53.78} +{"Rank":9,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.53,"CLSClusteringP2P":39.91,"CLSClusteringS2S":37.63,"ThuNewsClusteringP2P":58.45,"ThuNewsClusteringS2S":54.12} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.68,"CLSClusteringP2P":38.26,"CLSClusteringS2S":35.91,"ThuNewsClusteringP2P":58.71,"ThuNewsClusteringS2S":49.86} +{"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":45.26,"CLSClusteringP2P":39.14,"CLSClusteringS2S":37.79,"ThuNewsClusteringP2P":55.18,"ThuNewsClusteringS2S":48.93} +{"Rank":12,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":44.39,"CLSClusteringP2P":37.01,"CLSClusteringS2S":33.46,"ThuNewsClusteringP2P":58.83,"ThuNewsClusteringS2S":48.26} +{"Rank":13,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":44.18,"CLSClusteringP2P":38.14,"CLSClusteringS2S":35.14,"ThuNewsClusteringP2P":54.22,"ThuNewsClusteringS2S":49.22} +{"Rank":14,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":40.68,"CLSClusteringP2P":32.41,"CLSClusteringS2S":36.99,"ThuNewsClusteringP2P":40.98,"ThuNewsClusteringS2S":52.36} +{"Rank":15,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":37.66,"CLSClusteringP2P":35.27,"CLSClusteringS2S":32.42,"ThuNewsClusteringP2P":42.92,"ThuNewsClusteringS2S":40.01} +{"Rank":16,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":30.02,"CLSClusteringP2P":30.13,"CLSClusteringS2S":28.77,"ThuNewsClusteringP2P":35.05,"ThuNewsClusteringS2S":26.14} +{"Rank":17,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":18,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":19,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} 
+{"Rank":20,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":21,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":22,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":23,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":24,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":25,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":26,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":27,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":28,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":29,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":30,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":31,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":32,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":33,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":34,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} 
+{"Rank":35,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":36,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":37,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":38,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":39,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":40,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":41,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":42,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":43,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":44,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":45,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":46,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":47,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":48,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":49,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, 
fp32)":1.59,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":50,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":51,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":52,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":53,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":54,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":55,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":56,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":57,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":58,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":59,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":60,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":61,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":62,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":63,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":64,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":65,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":66,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":67,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":68,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":69,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":70,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":71,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":72,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":73,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":74,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":75,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":76,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":77,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":78,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":79,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage 
(GB, fp32)":0.12,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":80,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":81,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":82,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":83,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":84,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":85,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":86,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":87,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":88,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":89,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":90,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":91,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":92,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":93,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":94,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":95,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":96,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":97,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":98,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":99,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":100,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":102,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":103,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":104,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":105,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":106,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":107,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":108,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":109,"Model":"text-embedding-3-large-256<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":110,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":111,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} diff --git a/app.py b/app.py index ad3e19d2522ae13ac6979de4957fb18c1d6a0834..440c33de9d7dc7c955feaa3e87620200c45b0100 100644 --- a/app.py +++ b/app.py @@ -255,7 +255,6 @@ def filter_data(search_query, model_types, model_sizes, *full_dataframes): sizes = df["Model Size (Million Parameters)"].replace('', 0) mask = sizes.apply(lambda size: any(numeric_interval.contains(size))) df = df[mask] - df.round(2) output_dataframes.append(df) return output_dataframes @@ -334,8 +333,7 @@ with gr.Blocks(css=css) as block: with gr.Row(): datatype = ["number", "markdown"] + ["number"] * len(item["data"]) - # NOTE: some model memory usage added more float points (0.30000004 -> 0.3) - dataframe = gr.Dataframe(item["data"].round(2), datatype=datatype, type="pandas", height=500) + dataframe = gr.Dataframe(item["data"], datatype=datatype, type="pandas", height=500) dataframes.append(dataframe) full_dataframe = gr.Dataframe(item["data"], datatype=datatype, type="pandas", visible=False) diff --git a/boards_data/bright/data_tasks/Retrieval/default.jsonl b/boards_data/bright/data_tasks/Retrieval/default.jsonl index 7eee3b897cab8d2a6c37a17420fc48e63f7edd32..651c586d5c83a7d7ab48a093a03c03f6d7da87b2 100644 --- a/boards_data/bright/data_tasks/Retrieval/default.jsonl +++ b/boards_data/bright/data_tasks/Retrieval/default.jsonl @@ -1,14 +1,14 @@ -{"level_0":0,"index":4,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15} -{"level_0":1,"index":3,"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66} -{"level_0":2,"index":7,"Rank":3,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, 
fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41} -{"level_0":3,"index":0,"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77} -{"level_0":4,"index":10,"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5} -{"level_0":5,"index":8,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05} -{"level_0":6,"index":1,"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13} -{"level_0":7,"index":13,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25} 
-{"level_0":8,"index":11,"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78} -{"level_0":9,"index":6,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04} -{"level_0":10,"index":12,"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38} -{"level_0":11,"index":2,"Rank":12,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25} -{"level_0":12,"index":9,"Rank":13,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29} -{"level_0":13,"index":5,"Rank":14,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval 
(theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51} +{"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15} +{"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66} +{"Rank":3,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41} +{"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77} +{"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5} +{"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval 
(theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05} +{"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13} +{"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25} +{"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78} +{"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04} +{"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38} +{"Rank":12,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval 
(theoremqa_theorems)":4.25} +{"Rank":13,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29} +{"Rank":14,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51} diff --git a/boards_data/da/data_tasks/BitextMining/default.jsonl b/boards_data/da/data_tasks/BitextMining/default.jsonl index 665bcd2af7b494312b2871eee4a14f08b1e5d16a..79d390547aa179d45176af4da59200f9fc3d82a1 100644 --- a/boards_data/da/data_tasks/BitextMining/default.jsonl +++ b/boards_data/da/data_tasks/BitextMining/default.jsonl @@ -1,24 +1,24 @@ -{"level_0":0,"index":9,"Rank":1,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","BornholmBitextMining":47.37} -{"level_0":1,"index":13,"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"BornholmBitextMining":46.4} -{"level_0":2,"index":14,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"BornholmBitextMining":44.16} -{"level_0":3,"index":15,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"BornholmBitextMining":43.89} -{"level_0":4,"index":12,"Rank":5,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"BornholmBitextMining":40.27} -{"level_0":5,"index":11,"Rank":6,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"BornholmBitextMining":40.15} -{"level_0":6,"index":10,"Rank":7,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"BornholmBitextMining":40.09} -{"level_0":7,"index":1,"Rank":8,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","BornholmBitextMining":37.97} -{"level_0":8,"index":20,"Rank":9,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"BornholmBitextMining":29.68} -{"level_0":9,"index":8,"Rank":10,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"BornholmBitextMining":15.93} -{"level_0":10,"index":4,"Rank":11,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"BornholmBitextMining":14.08} -{"level_0":11,"index":7,"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"BornholmBitextMining":11.65} 
-{"level_0":12,"index":5,"Rank":13,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"BornholmBitextMining":9.88} -{"level_0":13,"index":2,"Rank":14,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"BornholmBitextMining":6.6} -{"level_0":14,"index":22,"Rank":15,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"BornholmBitextMining":6.34} -{"level_0":15,"index":17,"Rank":16,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"BornholmBitextMining":6.08} -{"level_0":16,"index":6,"Rank":17,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"BornholmBitextMining":4.53} -{"level_0":17,"index":23,"Rank":18,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"BornholmBitextMining":4.42} -{"level_0":18,"index":18,"Rank":19,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"BornholmBitextMining":2.9} -{"level_0":19,"index":16,"Rank":20,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"BornholmBitextMining":1.44} -{"level_0":20,"index":3,"Rank":21,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"BornholmBitextMining":0.85} -{"level_0":21,"index":0,"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"BornholmBitextMining":""} -{"level_0":22,"index":19,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"BornholmBitextMining":""} -{"level_0":23,"index":21,"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"BornholmBitextMining":""} +{"Rank":1,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":40.52,"BornholmBitextMining":43.89,"BornholmBitextMining (dan-Latn)":37.15} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.81,"BornholmBitextMining":46.4,"BornholmBitextMining (dan-Latn)":33.22} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":36.88,"BornholmBitextMining":44.16,"BornholmBitextMining (dan-Latn)":29.61} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.68,"BornholmBitextMining":29.68,"BornholmBitextMining (dan-Latn)":29.68} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":45.13} +{"Rank":6,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","BornholmBitextMining":6.6,"BornholmBitextMining (dan-Latn)":""} +{"Rank":7,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","BornholmBitextMining":0.85,"BornholmBitextMining (dan-Latn)":""} +{"Rank":8,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, 
fp32)":0.47,"Average":"","BornholmBitextMining":14.08,"BornholmBitextMining (dan-Latn)":""} +{"Rank":9,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","BornholmBitextMining":9.88,"BornholmBitextMining (dan-Latn)":""} +{"Rank":10,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","BornholmBitextMining":4.53,"BornholmBitextMining (dan-Latn)":""} +{"Rank":11,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","BornholmBitextMining":11.65,"BornholmBitextMining (dan-Latn)":""} +{"Rank":12,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","BornholmBitextMining":40.09,"BornholmBitextMining (dan-Latn)":""} +{"Rank":13,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","BornholmBitextMining":40.15,"BornholmBitextMining (dan-Latn)":""} +{"Rank":14,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","BornholmBitextMining":40.27,"BornholmBitextMining (dan-Latn)":""} +{"Rank":15,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","BornholmBitextMining":1.44,"BornholmBitextMining (dan-Latn)":""} +{"Rank":16,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","BornholmBitextMining":6.08,"BornholmBitextMining (dan-Latn)":""} +{"Rank":17,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","BornholmBitextMining":2.9,"BornholmBitextMining (dan-Latn)":""} +{"Rank":18,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":45.63} +{"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":35.25} +{"Rank":20,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":27.44} +{"Rank":21,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":19.67} +{"Rank":22,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","BornholmBitextMining":"","BornholmBitextMining (dan-Latn)":18.18} +{"Rank":23,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","BornholmBitextMining":6.34,"BornholmBitextMining (dan-Latn)":""} +{"Rank":24,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","BornholmBitextMining":4.42,"BornholmBitextMining (dan-Latn)":""} diff --git a/boards_data/da/data_tasks/Classification/default.jsonl b/boards_data/da/data_tasks/Classification/default.jsonl index b73469428354a30a09bf54ba3773ef25908df53f..0db151fa509fb8152faba7dc76eaa5be2458dc72 100644 --- a/boards_data/da/data_tasks/Classification/default.jsonl +++ b/boards_data/da/data_tasks/Classification/default.jsonl @@ -1,47 +1,32 @@ 
-{"level_0":0,"index":17,"Rank":1,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":61.62,"AngryTweetsClassification":54.42,"DKHateClassification":63.19,"DanishPoliticalCommentsClassification":37.76,"LccSentimentClassification":58.07,"MassiveIntentClassification (da)":65.83,"MassiveScenarioClassification (da)":71.61,"NordicLangClassification":75.98,"ScalaDaClassification":66.09} -{"level_0":1,"index":24,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":59.94,"AngryTweetsClassification":54.95,"DKHateClassification":66.02,"DanishPoliticalCommentsClassification":38.27,"LccSentimentClassification":59.6,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":82.29,"ScalaDaClassification":50.77} -{"level_0":2,"index":16,"Rank":3,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":59.16,"AngryTweetsClassification":53.8,"DKHateClassification":60.09,"DanishPoliticalCommentsClassification":36.6,"LccSentimentClassification":57.33,"MassiveIntentClassification (da)":60.55,"MassiveScenarioClassification (da)":64.16,"NordicLangClassification":77.68,"ScalaDaClassification":63.08} -{"level_0":3,"index":13,"Rank":4,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.9,"AngryTweetsClassification":52.14,"DKHateClassification":62.13,"DanishPoliticalCommentsClassification":35.04,"LccSentimentClassification":56.27,"MassiveIntentClassification (da)":57.03,"MassiveScenarioClassification (da)":60.43,"NordicLangClassification":85.27,"ScalaDaClassification":62.85} -{"level_0":4,"index":23,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.61,"AngryTweetsClassification":54.65,"DKHateClassification":63.53,"DanishPoliticalCommentsClassification":36.69,"LccSentimentClassification":59.67,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":75.94,"ScalaDaClassification":50.79} -{"level_0":5,"index":45,"Rank":6,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":58.31,"AngryTweetsClassification":54.28,"DKHateClassification":59.3,"DanishPoliticalCommentsClassification":39.81,"LccSentimentClassification":58.0,"MassiveIntentClassification (da)":54.68,"MassiveScenarioClassification (da)":59.56,"NordicLangClassification":74.25,"ScalaDaClassification":66.59} -{"level_0":6,"index":12,"Rank":7,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":57.68,"AngryTweetsClassification":52.14,"DKHateClassification":61.73,"DanishPoliticalCommentsClassification":34.84,"LccSentimentClassification":51.4,"MassiveIntentClassification (da)":56.69,"MassiveScenarioClassification (da)":61.93,"NordicLangClassification":84.69,"ScalaDaClassification":57.99} -{"level_0":7,"index":30,"Rank":8,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":56.34,"AngryTweetsClassification":52.48,"DKHateClassification":58.78,"DanishPoliticalCommentsClassification":34.14,"LccSentimentClassification":54.07,"MassiveIntentClassification (da)":53.16,"MassiveScenarioClassification (da)":57.17,"NordicLangClassification":82.67,"ScalaDaClassification":58.25} 
-{"level_0":8,"index":26,"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":56.12,"AngryTweetsClassification":53.57,"DKHateClassification":60.73,"DanishPoliticalCommentsClassification":34.38,"LccSentimentClassification":57.87,"MassiveIntentClassification (da)":54.63,"MassiveScenarioClassification (da)":62.34,"NordicLangClassification":75.15,"ScalaDaClassification":50.3} -{"level_0":9,"index":18,"Rank":10,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.03,"AngryTweetsClassification":49.62,"DKHateClassification":69.97,"DanishPoliticalCommentsClassification":37.59,"LccSentimentClassification":54.27,"MassiveIntentClassification (da)":62.03,"MassiveScenarioClassification (da)":67.76,"NordicLangClassification":48.4,"ScalaDaClassification":50.63} -{"level_0":10,"index":31,"Rank":11,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":54.21,"AngryTweetsClassification":49.04,"DKHateClassification":62.71,"DanishPoliticalCommentsClassification":33.53,"LccSentimentClassification":46.93,"MassiveIntentClassification (da)":45.98,"MassiveScenarioClassification (da)":50.51,"NordicLangClassification":84.25,"ScalaDaClassification":60.72} -{"level_0":11,"index":46,"Rank":12,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":52.14,"AngryTweetsClassification":52.41,"DKHateClassification":56.78,"DanishPoliticalCommentsClassification":34.03,"LccSentimentClassification":52.27,"MassiveIntentClassification (da)":41.06,"MassiveScenarioClassification (da)":43.91,"NordicLangClassification":79.39,"ScalaDaClassification":57.3} -{"level_0":12,"index":20,"Rank":13,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":47.39,"AngryTweetsClassification":46.14,"DKHateClassification":58.72,"DanishPoliticalCommentsClassification":28.67,"LccSentimentClassification":42.13,"MassiveIntentClassification (da)":42.29,"MassiveScenarioClassification (da)":52.95,"NordicLangClassification":58.3,"ScalaDaClassification":49.9} -{"level_0":13,"index":19,"Rank":14,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":47.02,"AngryTweetsClassification":45.06,"DKHateClassification":58.51,"DanishPoliticalCommentsClassification":28.43,"LccSentimentClassification":37.47,"MassiveIntentClassification (da)":44.25,"MassiveScenarioClassification (da)":52.99,"NordicLangClassification":59.34,"ScalaDaClassification":50.08} -{"level_0":14,"index":8,"Rank":15,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":46.67,"AngryTweetsClassification":44.46,"DKHateClassification":59.36,"DanishPoliticalCommentsClassification":28.32,"LccSentimentClassification":47.2,"MassiveIntentClassification (da)":42.84,"MassiveScenarioClassification (da)":49.64,"NordicLangClassification":51.45,"ScalaDaClassification":50.12} -{"level_0":15,"index":29,"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":46.33,"AngryTweetsClassification":47.91,"DKHateClassification":59.45,"DanishPoliticalCommentsClassification":31.89,"LccSentimentClassification":47.93,"MassiveIntentClassification (da)":26.3,"MassiveScenarioClassification (da)":28.93,"NordicLangClassification":57.82,"ScalaDaClassification":70.41} 
-{"level_0":16,"index":22,"Rank":17,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":45.66,"AngryTweetsClassification":43.6,"DKHateClassification":57.57,"DanishPoliticalCommentsClassification":28.37,"LccSentimentClassification":40.27,"MassiveIntentClassification (da)":41.89,"MassiveScenarioClassification (da)":49.93,"NordicLangClassification":53.47,"ScalaDaClassification":50.15} -{"level_0":17,"index":6,"Rank":18,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":45.58,"AngryTweetsClassification":44.58,"DKHateClassification":55.53,"DanishPoliticalCommentsClassification":28.97,"LccSentimentClassification":41.2,"MassiveIntentClassification (da)":37.98,"MassiveScenarioClassification (da)":40.44,"NordicLangClassification":62.45,"ScalaDaClassification":53.53} -{"level_0":18,"index":34,"Rank":19,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":44.46,"AngryTweetsClassification":42.49,"DKHateClassification":55.05,"DanishPoliticalCommentsClassification":26.96,"LccSentimentClassification":38.47,"MassiveIntentClassification (da)":40.99,"MassiveScenarioClassification (da)":47.01,"NordicLangClassification":54.71,"ScalaDaClassification":50.03} -{"level_0":19,"index":7,"Rank":20,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":33.61,"AngryTweetsClassification":40.52,"DKHateClassification":52.28,"DanishPoliticalCommentsClassification":25.17,"LccSentimentClassification":36.67,"MassiveIntentClassification (da)":6.51,"MassiveScenarioClassification (da)":11.5,"NordicLangClassification":44.53,"ScalaDaClassification":51.66} -{"level_0":20,"index":0,"Rank":21,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":45.61,"MassiveScenarioClassification (da)":54.87,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":21,"index":1,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":46.17,"DKHateClassification":55.9,"DanishPoliticalCommentsClassification":28.33,"LccSentimentClassification":42.27,"MassiveIntentClassification (da)":40.37,"MassiveScenarioClassification (da)":49.35,"NordicLangClassification":"","ScalaDaClassification":50.11} -{"level_0":22,"index":2,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":31.47,"MassiveScenarioClassification (da)":39.79,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":23,"index":3,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":35.53,"MassiveScenarioClassification (da)":42.65,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":24,"index":4,"Rank":25,"Model":"GritLM-7B<\/a>","Model Size 
(Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":25,"index":5,"Rank":26,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":56.08,"DKHateClassification":"","DanishPoliticalCommentsClassification":40.88,"LccSentimentClassification":59.6,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":61.0,"ScalaDaClassification":50.43} -{"level_0":26,"index":9,"Rank":27,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":41.0,"MassiveScenarioClassification (da)":51.92,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":27,"index":10,"Rank":28,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":44.18,"MassiveScenarioClassification (da)":45.44,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":28,"index":11,"Rank":29,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":43.56,"MassiveScenarioClassification (da)":44.75,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":29,"index":14,"Rank":30,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":55.06,"MassiveScenarioClassification (da)":64.26,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":30,"index":15,"Rank":31,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":51.23,"MassiveScenarioClassification (da)":53.52,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":31,"index":21,"Rank":32,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":72.53,"MassiveScenarioClassification (da)":76.76,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":32,"index":25,"Rank":33,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":71.81,"MassiveScenarioClassification (da)":77.61,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":33,"index":27,"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":49.54,"MassiveScenarioClassification (da)":52.53,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":34,"index":28,"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":46.93,"MassiveScenarioClassification (da)":49.11,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":35,"index":32,"Rank":36,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":58.25,"MassiveScenarioClassification (da)":65.24,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":36,"index":33,"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":44.43,"MassiveScenarioClassification (da)":49.47,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":37,"index":35,"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":38,"index":36,"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":38.47,"MassiveScenarioClassification (da)":39.93,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":39,"index":37,"Rank":40,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":52.33,"MassiveScenarioClassification (da)":62.55,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":40,"index":38,"Rank":41,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":45.3,"MassiveScenarioClassification (da)":54.88,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":41,"index":39,"Rank":42,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":48.42,"MassiveScenarioClassification (da)":57.28,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":42,"index":40,"Rank":43,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":38.82,"MassiveScenarioClassification (da)":48.36,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":43,"index":41,"Rank":44,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":41.76,"MassiveScenarioClassification (da)":51.44,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":44,"index":42,"Rank":45,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":47.69,"MassiveScenarioClassification (da)":55.79,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":45,"index":43,"Rank":46,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":32.25,"MassiveScenarioClassification (da)":41.14,"NordicLangClassification":"","ScalaDaClassification":""} -{"level_0":46,"index":44,"Rank":47,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AngryTweetsClassification":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","LccSentimentClassification":"","MassiveIntentClassification (da)":56.74,"MassiveScenarioClassification (da)":63.07,"NordicLangClassification":"","ScalaDaClassification":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":59.86,"AngryTweetsClassification":54.95,"AngryTweetsClassification (dan-Latn)":57.69,"DKHateClassification":66.02,"DanishPoliticalCommentsClassification":38.27,"DanishPoliticalCommentsClassification (dan-Latn)":39.43,"LccSentimentClassification":59.6,"LccSentimentClassification (dan-Latn)":61.53,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":82.29,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":80.15,"ScalaDaClassification":50.77} 
+{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.13,"AngryTweetsClassification":54.65,"AngryTweetsClassification (dan-Latn)":56.28,"DKHateClassification":63.53,"DanishPoliticalCommentsClassification":36.69,"DanishPoliticalCommentsClassification (dan-Latn)":36.41,"LccSentimentClassification":59.67,"LccSentimentClassification (dan-Latn)":60.13,"MassiveIntentClassification (da)":60.16,"MassiveScenarioClassification (da)":67.46,"NordicLangClassification":75.94,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":75.85,"ScalaDaClassification":50.79} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.9,"AngryTweetsClassification":53.57,"AngryTweetsClassification (dan-Latn)":56.27,"DKHateClassification":60.73,"DanishPoliticalCommentsClassification":34.38,"DanishPoliticalCommentsClassification (dan-Latn)":34.82,"LccSentimentClassification":57.87,"LccSentimentClassification (dan-Latn)":58.6,"MassiveIntentClassification (da)":54.63,"MassiveScenarioClassification (da)":62.34,"NordicLangClassification":75.15,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":72.15,"ScalaDaClassification":50.3} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":43.18,"AngryTweetsClassification":42.49,"AngryTweetsClassification (dan-Latn)":42.48,"DKHateClassification":55.05,"DanishPoliticalCommentsClassification":26.96,"DanishPoliticalCommentsClassification (dan-Latn)":26.7,"LccSentimentClassification":38.47,"LccSentimentClassification (dan-Latn)":38.53,"MassiveIntentClassification (da)":40.99,"MassiveScenarioClassification (da)":47.01,"NordicLangClassification":54.71,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.7,"ScalaDaClassification":50.03} +{"Rank":5,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":45.61,"MassiveScenarioClassification (da)":54.87,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":6,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":54.68,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":37.69,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":57.2,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":63.6,"ScalaDaClassification":""} +{"Rank":7,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","AngryTweetsClassification":44.58,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":55.53,"DanishPoliticalCommentsClassification":28.97,"DanishPoliticalCommentsClassification 
(dan-Latn)":"","LccSentimentClassification":41.2,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":37.98,"MassiveScenarioClassification (da)":40.44,"NordicLangClassification":62.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":53.53} +{"Rank":8,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","AngryTweetsClassification":40.52,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":52.28,"DanishPoliticalCommentsClassification":25.17,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":36.67,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":6.51,"MassiveScenarioClassification (da)":11.5,"NordicLangClassification":44.53,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":51.66} +{"Rank":9,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","AngryTweetsClassification":44.46,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":59.36,"DanishPoliticalCommentsClassification":28.32,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":47.2,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":42.84,"MassiveScenarioClassification (da)":49.64,"NordicLangClassification":51.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":50.12} +{"Rank":10,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AngryTweetsClassification":52.14,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":61.73,"DanishPoliticalCommentsClassification":34.84,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":51.4,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":56.69,"MassiveScenarioClassification (da)":61.93,"NordicLangClassification":84.69,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":57.99} +{"Rank":11,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","AngryTweetsClassification":52.14,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":62.13,"DanishPoliticalCommentsClassification":35.04,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":56.27,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":57.03,"MassiveScenarioClassification (da)":60.43,"NordicLangClassification":85.27,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":62.85} +{"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","AngryTweetsClassification":53.8,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":60.09,"DanishPoliticalCommentsClassification":36.6,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":57.33,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":60.55,"MassiveScenarioClassification (da)":64.16,"NordicLangClassification":77.68,"NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":63.08} +{"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":45.06,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":58.51,"DanishPoliticalCommentsClassification":28.43,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":37.47,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":44.25,"MassiveScenarioClassification (da)":52.99,"NordicLangClassification":59.34,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":50.08} +{"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","AngryTweetsClassification":46.14,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":58.72,"DanishPoliticalCommentsClassification":28.67,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":42.13,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":42.29,"MassiveScenarioClassification (da)":52.95,"NordicLangClassification":58.3,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":49.9} +{"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AngryTweetsClassification":43.6,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":57.57,"DanishPoliticalCommentsClassification":28.37,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":40.27,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":41.89,"MassiveScenarioClassification (da)":49.93,"NordicLangClassification":53.47,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":50.15} +{"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AngryTweetsClassification":47.91,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":59.45,"DanishPoliticalCommentsClassification":31.89,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":47.93,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":26.3,"MassiveScenarioClassification (da)":28.93,"NordicLangClassification":57.82,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":70.41} +{"Rank":17,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","AngryTweetsClassification":52.48,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":58.78,"DanishPoliticalCommentsClassification":34.14,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":54.07,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":53.16,"MassiveScenarioClassification (da)":57.17,"NordicLangClassification":82.67,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":58.25} +{"Rank":18,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","AngryTweetsClassification":49.04,"AngryTweetsClassification 
(dan-Latn)":"","DKHateClassification":62.71,"DanishPoliticalCommentsClassification":33.53,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":46.93,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":45.98,"MassiveScenarioClassification (da)":50.51,"NordicLangClassification":84.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":60.72} +{"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":51.11,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":38.34,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":50.07,"MassiveIntentClassification (da)":58.25,"MassiveScenarioClassification (da)":65.24,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":35.39,"ScalaDaClassification":""} +{"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":42.87,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":27.07,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":41.93,"MassiveIntentClassification (da)":44.43,"MassiveScenarioClassification (da)":49.47,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.17,"ScalaDaClassification":""} +{"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":44.13,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":28.31,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":39.27,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":50.15,"ScalaDaClassification":""} +{"Rank":22,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":38.47,"MassiveScenarioClassification (da)":39.93,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":52.33,"MassiveScenarioClassification (da)":62.55,"NordicLangClassification":"","NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":45.3,"MassiveScenarioClassification (da)":54.88,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":25,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":48.42,"MassiveScenarioClassification (da)":57.28,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":26,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":50.9,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":37.58,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":54.53,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":42.52,"ScalaDaClassification":""} +{"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":54.84,"DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":40.96,"LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":58.4,"MassiveIntentClassification (da)":"","MassiveScenarioClassification (da)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":41.57,"ScalaDaClassification":""} +{"Rank":28,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":38.82,"MassiveScenarioClassification (da)":48.36,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification 
(dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":41.76,"MassiveScenarioClassification (da)":51.44,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":30,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AngryTweetsClassification":"","AngryTweetsClassification (dan-Latn)":"","DKHateClassification":"","DanishPoliticalCommentsClassification":"","DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":"","LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":47.69,"MassiveScenarioClassification (da)":55.79,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":""} +{"Rank":31,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","AngryTweetsClassification":54.28,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":59.3,"DanishPoliticalCommentsClassification":39.81,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":58.0,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":54.68,"MassiveScenarioClassification (da)":59.56,"NordicLangClassification":74.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":66.59} +{"Rank":32,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AngryTweetsClassification":52.41,"AngryTweetsClassification (dan-Latn)":"","DKHateClassification":56.78,"DanishPoliticalCommentsClassification":34.03,"DanishPoliticalCommentsClassification (dan-Latn)":"","LccSentimentClassification":52.27,"LccSentimentClassification (dan-Latn)":"","MassiveIntentClassification (da)":41.06,"MassiveScenarioClassification (da)":43.91,"NordicLangClassification":79.39,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","ScalaDaClassification":57.3} diff --git a/boards_data/de/data_tasks/Clustering/default.jsonl b/boards_data/de/data_tasks/Clustering/default.jsonl index b4691184f0a3f6fbf651f263e6e30f3eb4be73dc..373773d0dd55ede976900a238f8cb2df2f848503 100644 --- a/boards_data/de/data_tasks/Clustering/default.jsonl +++ b/boards_data/de/data_tasks/Clustering/default.jsonl @@ -1,19 +1,14 @@ -{"level_0":0,"index":5,"Rank":1,"Model":"gbert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":32.34,"BlurbsClusteringP2P":39.3,"BlurbsClusteringS2S":13.38,"TenKGnadClusteringP2P":41.69,"TenKGnadClusteringS2S":34.97} -{"level_0":1,"index":15,"Rank":2,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":29.74,"BlurbsClusteringP2P":39.91,"BlurbsClusteringS2S":15.94,"TenKGnadClusteringP2P":43.43,"TenKGnadClusteringS2S":19.69} -{"level_0":2,"index":9,"Rank":3,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.55,"BlurbsClusteringP2P":35.49,"BlurbsClusteringS2S":16.17,"TenKGnadClusteringP2P":42.84,"TenKGnadClusteringS2S":23.69} 
-{"level_0":3,"index":3,"Rank":4,"Model":"German_Semantic_STS_V2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.5,"BlurbsClusteringP2P":38.68,"BlurbsClusteringS2S":17.62,"TenKGnadClusteringP2P":38.0,"TenKGnadClusteringS2S":23.71} -{"level_0":4,"index":14,"Rank":5,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":27.49,"BlurbsClusteringP2P":35.33,"BlurbsClusteringS2S":13.27,"TenKGnadClusteringP2P":44.11,"TenKGnadClusteringS2S":17.26} -{"level_0":5,"index":12,"Rank":6,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":27.04,"BlurbsClusteringP2P":34.38,"BlurbsClusteringS2S":15.81,"TenKGnadClusteringP2P":35.96,"TenKGnadClusteringS2S":22.0} -{"level_0":6,"index":4,"Rank":7,"Model":"gbert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":27.0,"BlurbsClusteringP2P":35.36,"BlurbsClusteringS2S":11.27,"TenKGnadClusteringP2P":37.16,"TenKGnadClusteringS2S":24.23} -{"level_0":7,"index":16,"Rank":8,"Model":"use-cmlm-multilingual<\/a>","Model Size (Million Parameters)":472,"Memory Usage (GB, fp32)":1.76,"Average":26.9,"BlurbsClusteringP2P":29.63,"BlurbsClusteringS2S":15.24,"TenKGnadClusteringP2P":37.1,"TenKGnadClusteringS2S":25.64} -{"level_0":8,"index":11,"Rank":9,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.3,"BlurbsClusteringP2P":32.46,"BlurbsClusteringS2S":14.33,"TenKGnadClusteringP2P":36.13,"TenKGnadClusteringS2S":22.26} -{"level_0":9,"index":13,"Rank":10,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.29,"BlurbsClusteringP2P":30.59,"BlurbsClusteringS2S":11.57,"TenKGnadClusteringP2P":44.88,"TenKGnadClusteringS2S":18.11} -{"level_0":10,"index":8,"Rank":11,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.71,"BlurbsClusteringP2P":27.0,"BlurbsClusteringS2S":14.85,"TenKGnadClusteringP2P":21.83,"TenKGnadClusteringS2S":27.16} -{"level_0":11,"index":17,"Rank":12,"Model":"gottbert-base<\/a>","Model Size (Million Parameters)":127,"Memory Usage (GB, fp32)":0.47,"Average":21.46,"BlurbsClusteringP2P":34.49,"BlurbsClusteringS2S":8.37,"TenKGnadClusteringP2P":33.66,"TenKGnadClusteringS2S":9.34} -{"level_0":12,"index":2,"Rank":13,"Model":"cross-en-de-roberta-sentence-transformer<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":19.49,"BlurbsClusteringP2P":30.82,"BlurbsClusteringS2S":12.69,"TenKGnadClusteringP2P":23.5,"TenKGnadClusteringS2S":10.94} -{"level_0":13,"index":18,"Rank":14,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":18.94,"BlurbsClusteringP2P":29.84,"BlurbsClusteringS2S":7.29,"TenKGnadClusteringP2P":32.46,"TenKGnadClusteringS2S":6.16} -{"level_0":14,"index":0,"Rank":15,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.62,"BlurbsClusteringP2P":17.47,"BlurbsClusteringS2S":7.96,"TenKGnadClusteringP2P":29.79,"TenKGnadClusteringS2S":11.27} -{"level_0":15,"index":10,"Rank":16,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":10.03,"BlurbsClusteringP2P":11.37,"BlurbsClusteringS2S":8.01,"TenKGnadClusteringP2P":15.89,"TenKGnadClusteringS2S":4.84} 
-{"level_0":16,"index":7,"Rank":17,"Model":"gelectra-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":9.23,"BlurbsClusteringP2P":13.96,"BlurbsClusteringS2S":7.57,"TenKGnadClusteringP2P":11.49,"TenKGnadClusteringS2S":3.91} -{"level_0":17,"index":6,"Rank":18,"Model":"gelectra-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":7.73,"BlurbsClusteringP2P":10.06,"BlurbsClusteringS2S":7.74,"TenKGnadClusteringP2P":9.02,"TenKGnadClusteringS2S":4.11} -{"level_0":18,"index":1,"Rank":19,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BlurbsClusteringP2P":"","BlurbsClusteringS2S":8.0,"TenKGnadClusteringP2P":"","TenKGnadClusteringS2S":""} +{"Rank":1,"Model":"gbert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":32.34,"BlurbsClusteringP2P":39.3,"BlurbsClusteringS2S":13.38,"TenKGnadClusteringP2P":41.69,"TenKGnadClusteringS2S":34.97} +{"Rank":2,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":29.74,"BlurbsClusteringP2P":39.91,"BlurbsClusteringS2S":15.94,"TenKGnadClusteringP2P":43.43,"TenKGnadClusteringS2S":19.69} +{"Rank":3,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":27.49,"BlurbsClusteringP2P":35.33,"BlurbsClusteringS2S":13.27,"TenKGnadClusteringP2P":44.11,"TenKGnadClusteringS2S":17.26} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":27.04,"BlurbsClusteringP2P":34.38,"BlurbsClusteringS2S":15.81,"TenKGnadClusteringP2P":35.96,"TenKGnadClusteringS2S":22.0} +{"Rank":5,"Model":"gbert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":27.0,"BlurbsClusteringP2P":35.36,"BlurbsClusteringS2S":11.27,"TenKGnadClusteringP2P":37.16,"TenKGnadClusteringS2S":24.23} +{"Rank":6,"Model":"use-cmlm-multilingual<\/a>","Model Size (Million Parameters)":472,"Memory Usage (GB, fp32)":1.76,"Average":26.9,"BlurbsClusteringP2P":29.63,"BlurbsClusteringS2S":15.24,"TenKGnadClusteringP2P":37.1,"TenKGnadClusteringS2S":25.64} +{"Rank":7,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.3,"BlurbsClusteringP2P":32.46,"BlurbsClusteringS2S":14.33,"TenKGnadClusteringP2P":36.13,"TenKGnadClusteringS2S":22.26} +{"Rank":8,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.29,"BlurbsClusteringP2P":30.59,"BlurbsClusteringS2S":11.57,"TenKGnadClusteringP2P":44.88,"TenKGnadClusteringS2S":18.11} +{"Rank":9,"Model":"gottbert-base<\/a>","Model Size (Million Parameters)":127,"Memory Usage (GB, fp32)":0.47,"Average":21.46,"BlurbsClusteringP2P":34.49,"BlurbsClusteringS2S":8.37,"TenKGnadClusteringP2P":33.66,"TenKGnadClusteringS2S":9.34} +{"Rank":10,"Model":"cross-en-de-roberta-sentence-transformer<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":19.49,"BlurbsClusteringP2P":30.82,"BlurbsClusteringS2S":12.69,"TenKGnadClusteringP2P":23.5,"TenKGnadClusteringS2S":10.94} +{"Rank":11,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":18.94,"BlurbsClusteringP2P":29.84,"BlurbsClusteringS2S":7.29,"TenKGnadClusteringP2P":32.46,"TenKGnadClusteringS2S":6.16} 
+{"Rank":12,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":10.03,"BlurbsClusteringP2P":11.37,"BlurbsClusteringS2S":8.01,"TenKGnadClusteringP2P":15.89,"TenKGnadClusteringS2S":4.84} +{"Rank":13,"Model":"gelectra-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":9.23,"BlurbsClusteringP2P":13.96,"BlurbsClusteringS2S":7.57,"TenKGnadClusteringP2P":11.49,"TenKGnadClusteringS2S":3.91} +{"Rank":14,"Model":"gelectra-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":7.73,"BlurbsClusteringP2P":10.06,"BlurbsClusteringS2S":7.74,"TenKGnadClusteringP2P":9.02,"TenKGnadClusteringS2S":4.11} diff --git a/boards_data/en-x/data_tasks/BitextMining/default.jsonl b/boards_data/en-x/data_tasks/BitextMining/default.jsonl index 0d0970e0a3524e5a37df4027538a7d476a5ff11e..2b640bd39f6444a120c23534109e5574bf99a864 100644 --- a/boards_data/en-x/data_tasks/BitextMining/default.jsonl +++ b/boards_data/en-x/data_tasks/BitextMining/default.jsonl @@ -1,28 +1,9 @@ -{"level_0":0,"index":12,"Rank":1,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.29,"BUCC (de-en)":99.61,"BUCC (fr-en)":99.15,"BUCC (ru-en)":97.87,"BUCC (zh-en)":99.39,"Tatoeba (afr-eng)":95.4,"Tatoeba (amh-eng)":87.14,"Tatoeba (ang-eng)":72.81,"Tatoeba (ara-eng)":91.1,"Tatoeba (arq-eng)":63.07,"Tatoeba (arz-eng)":81.86,"Tatoeba (ast-eng)":89.76,"Tatoeba (awa-eng)":91.99,"Tatoeba (aze-eng)":93.79,"Tatoeba (bel-eng)":95.55,"Tatoeba (ben-eng)":89.13,"Tatoeba (ber-eng)":55.58,"Tatoeba (bos-eng)":95.86,"Tatoeba (bre-eng)":32.94,"Tatoeba (bul-eng)":94.38,"Tatoeba (cat-eng)":94.93,"Tatoeba (cbk-eng)":84.1,"Tatoeba (ceb-eng)":70.78,"Tatoeba (ces-eng)":96.57,"Tatoeba (cha-eng)":51.46,"Tatoeba (cmn-eng)":96.37,"Tatoeba (cor-eng)":11.82,"Tatoeba (csb-eng)":66.59,"Tatoeba (cym-eng)":89.97,"Tatoeba (dan-eng)":95.3,"Tatoeba (deu-eng)":99.33,"Tatoeba (dsb-eng)":72.0,"Tatoeba (dtp-eng)":14.51,"Tatoeba (ell-eng)":95.12,"Tatoeba (epo-eng)":98.48,"Tatoeba (est-eng)":88.03,"Tatoeba (eus-eng)":84.72,"Tatoeba (fao-eng)":86.77,"Tatoeba (fin-eng)":96.92,"Tatoeba (fra-eng)":95.02,"Tatoeba (fry-eng)":82.47,"Tatoeba (gla-eng)":78.33,"Tatoeba (gle-eng)":84.99,"Tatoeba (glg-eng)":96.22,"Tatoeba (gsw-eng)":61.9,"Tatoeba (heb-eng)":91.46,"Tatoeba (hin-eng)":97.6,"Tatoeba (hrv-eng)":96.75,"Tatoeba (hsb-eng)":79.45,"Tatoeba (hun-eng)":94.33,"Tatoeba (hye-eng)":93.52,"Tatoeba (ido-eng)":91.92,"Tatoeba (ile-eng)":89.78,"Tatoeba (ina-eng)":96.27,"Tatoeba (ind-eng)":94.42,"Tatoeba (isl-eng)":95.0,"Tatoeba (ita-eng)":94.67,"Tatoeba (jav-eng)":85.28,"Tatoeba (jpn-eng)":96.12,"Tatoeba (kab-eng)":57.85,"Tatoeba (kat-eng)":92.47,"Tatoeba (kaz-eng)":87.72,"Tatoeba (khm-eng)":73.27,"Tatoeba (kor-eng)":91.97,"Tatoeba (kur-eng)":77.33,"Tatoeba (kzj-eng)":14.98,"Tatoeba (lat-eng)":74.51,"Tatoeba (lfn-eng)":80.68,"Tatoeba (lit-eng)":92.01,"Tatoeba (lvs-eng)":92.81,"Tatoeba (mal-eng)":98.93,"Tatoeba (mar-eng)":92.58,"Tatoeba (max-eng)":73.44,"Tatoeba (mhr-eng)":16.92,"Tatoeba (mkd-eng)":92.49,"Tatoeba (mon-eng)":95.49,"Tatoeba (nds-eng)":86.32,"Tatoeba (nld-eng)":97.2,"Tatoeba (nno-eng)":94.26,"Tatoeba (nob-eng)":98.2,"Tatoeba (nov-eng)":83.0,"Tatoeba (oci-eng)":71.91,"Tatoeba (orv-eng)":58.29,"Tatoeba (pam-eng)":19.63,"Tatoeba (pes-eng)":94.57,"Tatoeba (pms-eng)":77.98,"Tatoeba (pol-eng)":97.27,"Tatoeba (por-eng)":94.91,"Tatoeba (ron-eng)":97.18,"Tatoeba (rus-eng)":93.57,"Tatoeba (slk-eng)":95.13,"Tatoeba 
(slv-eng)":91.91,"Tatoeba (spa-eng)":98.6,"Tatoeba (sqi-eng)":96.75,"Tatoeba (srp-eng)":94.53,"Tatoeba (swe-eng)":95.72,"Tatoeba (swg-eng)":77.68,"Tatoeba (swh-eng)":80.91,"Tatoeba (tam-eng)":90.65,"Tatoeba (tat-eng)":84.61,"Tatoeba (tel-eng)":96.08,"Tatoeba (tgl-eng)":95.85,"Tatoeba (tha-eng)":96.72,"Tatoeba (tuk-eng)":49.64,"Tatoeba (tur-eng)":98.27,"Tatoeba (tzl-eng)":61.09,"Tatoeba (uig-eng)":86.82,"Tatoeba (ukr-eng)":94.78,"Tatoeba (urd-eng)":93.41,"Tatoeba (uzb-eng)":80.55,"Tatoeba (vie-eng)":97.1,"Tatoeba (war-eng)":74.4,"Tatoeba (wuu-eng)":92.34,"Tatoeba (xho-eng)":88.26,"Tatoeba (yid-eng)":90.38,"Tatoeba (yue-eng)":93.47,"Tatoeba (zsm-eng)":96.48} -{"level_0":1,"index":8,"Rank":2,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.67,"BUCC (de-en)":98.82,"BUCC (fr-en)":98.09,"BUCC (ru-en)":97.37,"BUCC (zh-en)":98.72,"Tatoeba (afr-eng)":97.47,"Tatoeba (amh-eng)":91.67,"Tatoeba (ang-eng)":46.34,"Tatoeba (ara-eng)":93.83,"Tatoeba (arq-eng)":75.41,"Tatoeba (arz-eng)":86.76,"Tatoeba (ast-eng)":90.81,"Tatoeba (awa-eng)":87.86,"Tatoeba (aze-eng)":89.12,"Tatoeba (bel-eng)":95.68,"Tatoeba (ben-eng)":89.02,"Tatoeba (ber-eng)":29.29,"Tatoeba (bos-eng)":95.9,"Tatoeba (bre-eng)":8.48,"Tatoeba (bul-eng)":94.52,"Tatoeba (cat-eng)":96.65,"Tatoeba (cbk-eng)":80.47,"Tatoeba (ceb-eng)":74.67,"Tatoeba (ces-eng)":96.88,"Tatoeba (cha-eng)":25.11,"Tatoeba (cmn-eng)":93.8,"Tatoeba (cor-eng)":9.98,"Tatoeba (csb-eng)":77.98,"Tatoeba (cym-eng)":91.3,"Tatoeba (dan-eng)":97.09,"Tatoeba (deu-eng)":99.13,"Tatoeba (dsb-eng)":64.48,"Tatoeba (dtp-eng)":7.95,"Tatoeba (ell-eng)":95.47,"Tatoeba (epo-eng)":98.47,"Tatoeba (est-eng)":97.17,"Tatoeba (eus-eng)":95.38,"Tatoeba (fao-eng)":93.6,"Tatoeba (fin-eng)":97.83,"Tatoeba (fra-eng)":95.37,"Tatoeba (fry-eng)":59.45,"Tatoeba (gla-eng)":80.89,"Tatoeba (gle-eng)":91.63,"Tatoeba (glg-eng)":97.28,"Tatoeba (gsw-eng)":54.83,"Tatoeba (heb-eng)":92.34,"Tatoeba (hin-eng)":97.07,"Tatoeba (hrv-eng)":97.65,"Tatoeba (hsb-eng)":77.87,"Tatoeba (hun-eng)":96.93,"Tatoeba (hye-eng)":95.15,"Tatoeba (ido-eng)":93.67,"Tatoeba (ile-eng)":81.0,"Tatoeba (ina-eng)":88.66,"Tatoeba (ind-eng)":94.5,"Tatoeba (isl-eng)":96.47,"Tatoeba (ita-eng)":95.95,"Tatoeba (jav-eng)":85.76,"Tatoeba (jpn-eng)":94.67,"Tatoeba (kab-eng)":83.66,"Tatoeba (kat-eng)":93.69,"Tatoeba (kaz-eng)":88.12,"Tatoeba (khm-eng)":70.2,"Tatoeba (kor-eng)":89.99,"Tatoeba (kur-eng)":34.22,"Tatoeba (kzj-eng)":8.62,"Tatoeba (lat-eng)":36.64,"Tatoeba (lfn-eng)":65.54,"Tatoeba (lit-eng)":97.43,"Tatoeba (lvs-eng)":95.45,"Tatoeba (mal-eng)":97.67,"Tatoeba (mar-eng)":93.35,"Tatoeba (max-eng)":72.73,"Tatoeba (mhr-eng)":11.87,"Tatoeba (mkd-eng)":95.98,"Tatoeba (mon-eng)":91.59,"Tatoeba (nds-eng)":54.18,"Tatoeba (nld-eng)":97.2,"Tatoeba (nno-eng)":97.31,"Tatoeba (nob-eng)":99.07,"Tatoeba (nov-eng)":71.25,"Tatoeba (oci-eng)":84.57,"Tatoeba (orv-eng)":47.07,"Tatoeba (pam-eng)":10.3,"Tatoeba (pes-eng)":94.37,"Tatoeba (pms-eng)":66.63,"Tatoeba (pol-eng)":97.7,"Tatoeba (por-eng)":95.49,"Tatoeba (ron-eng)":97.6,"Tatoeba (rus-eng)":94.35,"Tatoeba (slk-eng)":97.63,"Tatoeba (slv-eng)":95.99,"Tatoeba (spa-eng)":98.53,"Tatoeba (sqi-eng)":98.55,"Tatoeba (srp-eng)":94.87,"Tatoeba (swe-eng)":97.03,"Tatoeba (swg-eng)":67.69,"Tatoeba (swh-eng)":91.98,"Tatoeba (tam-eng)":88.08,"Tatoeba (tat-eng)":87.05,"Tatoeba (tel-eng)":91.6,"Tatoeba (tgl-eng)":96.95,"Tatoeba (tha-eng)":96.05,"Tatoeba (tuk-eng)":91.82,"Tatoeba (tur-eng)":98.27,"Tatoeba (tzl-eng)":39.48,"Tatoeba (uig-eng)":90.07,"Tatoeba (ukr-eng)":94.3,"Tatoeba 
(urd-eng)":93.17,"Tatoeba (uzb-eng)":85.26,"Tatoeba (vie-eng)":97.17,"Tatoeba (war-eng)":82.83,"Tatoeba (wuu-eng)":81.79,"Tatoeba (xho-eng)":94.84,"Tatoeba (yid-eng)":94.3,"Tatoeba (yue-eng)":88.44,"Tatoeba (zsm-eng)":96.07} -{"level_0":2,"index":19,"Rank":3,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":81.75,"BUCC (de-en)":99.35,"BUCC (fr-en)":98.72,"BUCC (ru-en)":97.78,"BUCC (zh-en)":99.16,"Tatoeba (afr-eng)":96.18,"Tatoeba (amh-eng)":91.47,"Tatoeba (ang-eng)":59.28,"Tatoeba (ara-eng)":88.8,"Tatoeba (arq-eng)":42.69,"Tatoeba (arz-eng)":76.0,"Tatoeba (ast-eng)":90.68,"Tatoeba (awa-eng)":71.7,"Tatoeba (aze-eng)":94.93,"Tatoeba (bel-eng)":95.0,"Tatoeba (ben-eng)":88.55,"Tatoeba (ber-eng)":8.4,"Tatoeba (bos-eng)":94.92,"Tatoeba (bre-eng)":15.07,"Tatoeba (bul-eng)":94.58,"Tatoeba (cat-eng)":95.38,"Tatoeba (cbk-eng)":79.44,"Tatoeba (ceb-eng)":64.42,"Tatoeba (ces-eng)":96.68,"Tatoeba (cha-eng)":31.77,"Tatoeba (cmn-eng)":95.1,"Tatoeba (cor-eng)":10.11,"Tatoeba (csb-eng)":52.57,"Tatoeba (cym-eng)":92.0,"Tatoeba (dan-eng)":95.71,"Tatoeba (deu-eng)":99.2,"Tatoeba (dsb-eng)":64.81,"Tatoeba (dtp-eng)":10.85,"Tatoeba (ell-eng)":95.35,"Tatoeba (epo-eng)":98.2,"Tatoeba (est-eng)":96.55,"Tatoeba (eus-eng)":95.01,"Tatoeba (fao-eng)":87.4,"Tatoeba (fin-eng)":96.37,"Tatoeba (fra-eng)":94.86,"Tatoeba (fry-eng)":89.31,"Tatoeba (gla-eng)":85.66,"Tatoeba (gle-eng)":93.8,"Tatoeba (glg-eng)":96.82,"Tatoeba (gsw-eng)":46.5,"Tatoeba (heb-eng)":91.53,"Tatoeba (hin-eng)":96.87,"Tatoeba (hrv-eng)":96.95,"Tatoeba (hsb-eng)":67.11,"Tatoeba (hun-eng)":96.55,"Tatoeba (hye-eng)":94.09,"Tatoeba (ido-eng)":89.42,"Tatoeba (ile-eng)":85.58,"Tatoeba (ina-eng)":95.37,"Tatoeba (ind-eng)":93.66,"Tatoeba (isl-eng)":94.75,"Tatoeba (ita-eng)":92.72,"Tatoeba (jav-eng)":79.77,"Tatoeba (jpn-eng)":95.38,"Tatoeba (kab-eng)":4.31,"Tatoeba (kat-eng)":95.02,"Tatoeba (kaz-eng)":87.49,"Tatoeba (khm-eng)":78.37,"Tatoeba (kor-eng)":90.95,"Tatoeba (kur-eng)":83.59,"Tatoeba (kzj-eng)":11.33,"Tatoeba (lat-eng)":80.07,"Tatoeba (lfn-eng)":67.54,"Tatoeba (lit-eng)":96.47,"Tatoeba (lvs-eng)":95.88,"Tatoeba (mal-eng)":98.45,"Tatoeba (mar-eng)":92.65,"Tatoeba (max-eng)":63.26,"Tatoeba (mhr-eng)":15.74,"Tatoeba (mkd-eng)":93.6,"Tatoeba (mon-eng)":95.91,"Tatoeba (nds-eng)":79.42,"Tatoeba (nld-eng)":96.07,"Tatoeba (nno-eng)":94.48,"Tatoeba (nob-eng)":98.4,"Tatoeba (nov-eng)":74.38,"Tatoeba (oci-eng)":65.81,"Tatoeba (orv-eng)":38.93,"Tatoeba (pam-eng)":10.73,"Tatoeba (pes-eng)":94.7,"Tatoeba (pms-eng)":64.57,"Tatoeba (pol-eng)":97.22,"Tatoeba (por-eng)":94.14,"Tatoeba (ron-eng)":96.92,"Tatoeba (rus-eng)":93.75,"Tatoeba (slk-eng)":96.5,"Tatoeba (slv-eng)":96.03,"Tatoeba (spa-eng)":98.4,"Tatoeba (sqi-eng)":96.76,"Tatoeba (srp-eng)":94.43,"Tatoeba (swe-eng)":95.63,"Tatoeba (swg-eng)":59.36,"Tatoeba (swh-eng)":84.5,"Tatoeba (tam-eng)":89.0,"Tatoeba (tat-eng)":85.92,"Tatoeba (tel-eng)":97.86,"Tatoeba (tgl-eng)":96.02,"Tatoeba (tha-eng)":96.14,"Tatoeba (tuk-eng)":75.27,"Tatoeba (tur-eng)":98.0,"Tatoeba (tzl-eng)":58.88,"Tatoeba (uig-eng)":92.4,"Tatoeba (ukr-eng)":93.97,"Tatoeba (urd-eng)":93.22,"Tatoeba (uzb-eng)":84.23,"Tatoeba (vie-eng)":97.2,"Tatoeba (war-eng)":60.29,"Tatoeba (wuu-eng)":90.18,"Tatoeba (xho-eng)":91.55,"Tatoeba (yid-eng)":88.79,"Tatoeba (yue-eng)":89.58,"Tatoeba (zsm-eng)":95.62} -{"level_0":3,"index":11,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":76.52,"BUCC (de-en)":99.38,"BUCC (fr-en)":98.11,"BUCC 
(ru-en)":97.52,"BUCC (zh-en)":99.2,"Tatoeba (afr-eng)":90.26,"Tatoeba (amh-eng)":80.69,"Tatoeba (ang-eng)":40.18,"Tatoeba (ara-eng)":85.47,"Tatoeba (arq-eng)":41.56,"Tatoeba (arz-eng)":74.73,"Tatoeba (ast-eng)":81.76,"Tatoeba (awa-eng)":72.27,"Tatoeba (aze-eng)":87.61,"Tatoeba (bel-eng)":91.2,"Tatoeba (ben-eng)":83.02,"Tatoeba (ber-eng)":38.89,"Tatoeba (bos-eng)":92.86,"Tatoeba (bre-eng)":11.1,"Tatoeba (bul-eng)":92.93,"Tatoeba (cat-eng)":91.03,"Tatoeba (cbk-eng)":69.15,"Tatoeba (ceb-eng)":55.31,"Tatoeba (ces-eng)":94.89,"Tatoeba (cha-eng)":27.21,"Tatoeba (cmn-eng)":95.28,"Tatoeba (cor-eng)":6.28,"Tatoeba (csb-eng)":36.98,"Tatoeba (cym-eng)":76.21,"Tatoeba (dan-eng)":95.08,"Tatoeba (deu-eng)":99.07,"Tatoeba (dsb-eng)":48.44,"Tatoeba (dtp-eng)":7.0,"Tatoeba (ell-eng)":93.88,"Tatoeba (epo-eng)":96.01,"Tatoeba (est-eng)":85.03,"Tatoeba (eus-eng)":77.82,"Tatoeba (fao-eng)":72.62,"Tatoeba (fin-eng)":95.44,"Tatoeba (fra-eng)":93.42,"Tatoeba (fry-eng)":63.43,"Tatoeba (gla-eng)":59.01,"Tatoeba (gle-eng)":71.48,"Tatoeba (glg-eng)":93.34,"Tatoeba (gsw-eng)":51.65,"Tatoeba (heb-eng)":86.52,"Tatoeba (hin-eng)":94.48,"Tatoeba (hrv-eng)":96.15,"Tatoeba (hsb-eng)":58.7,"Tatoeba (hun-eng)":94.01,"Tatoeba (hye-eng)":90.92,"Tatoeba (ido-eng)":83.63,"Tatoeba (ile-eng)":79.15,"Tatoeba (ina-eng)":93.61,"Tatoeba (ind-eng)":92.9,"Tatoeba (isl-eng)":91.96,"Tatoeba (ita-eng)":93.29,"Tatoeba (jav-eng)":75.46,"Tatoeba (jpn-eng)":95.28,"Tatoeba (kab-eng)":36.54,"Tatoeba (kat-eng)":84.09,"Tatoeba (kaz-eng)":79.44,"Tatoeba (khm-eng)":60.02,"Tatoeba (kor-eng)":90.65,"Tatoeba (kur-eng)":66.83,"Tatoeba (kzj-eng)":7.91,"Tatoeba (lat-eng)":53.3,"Tatoeba (lfn-eng)":63.02,"Tatoeba (lit-eng)":88.48,"Tatoeba (lvs-eng)":89.83,"Tatoeba (mal-eng)":97.7,"Tatoeba (mar-eng)":88.58,"Tatoeba (max-eng)":63.42,"Tatoeba (mhr-eng)":6.79,"Tatoeba (mkd-eng)":85.5,"Tatoeba (mon-eng)":87.53,"Tatoeba (nds-eng)":69.52,"Tatoeba (nld-eng)":96.63,"Tatoeba (nno-eng)":91.4,"Tatoeba (nob-eng)":97.2,"Tatoeba (nov-eng)":71.56,"Tatoeba (oci-eng)":54.91,"Tatoeba (orv-eng)":39.8,"Tatoeba (pam-eng)":9.28,"Tatoeba (pes-eng)":92.14,"Tatoeba (pms-eng)":59.87,"Tatoeba (pol-eng)":96.6,"Tatoeba (por-eng)":93.63,"Tatoeba (ron-eng)":94.87,"Tatoeba (rus-eng)":92.32,"Tatoeba (slk-eng)":93.13,"Tatoeba (slv-eng)":89.65,"Tatoeba (spa-eng)":97.1,"Tatoeba (sqi-eng)":94.7,"Tatoeba (srp-eng)":93.1,"Tatoeba (swe-eng)":95.3,"Tatoeba (swg-eng)":56.83,"Tatoeba (swh-eng)":71.61,"Tatoeba (tam-eng)":88.23,"Tatoeba (tat-eng)":73.65,"Tatoeba (tel-eng)":91.34,"Tatoeba (tgl-eng)":92.0,"Tatoeba (tha-eng)":95.38,"Tatoeba (tuk-eng)":33.15,"Tatoeba (tur-eng)":96.27,"Tatoeba (tzl-eng)":53.16,"Tatoeba (uig-eng)":72.08,"Tatoeba (ukr-eng)":93.32,"Tatoeba (urd-eng)":89.21,"Tatoeba (uzb-eng)":72.35,"Tatoeba (vie-eng)":97.0,"Tatoeba (war-eng)":62.26,"Tatoeba (wuu-eng)":86.23,"Tatoeba (xho-eng)":80.87,"Tatoeba (yid-eng)":76.33,"Tatoeba (yue-eng)":88.71,"Tatoeba (zsm-eng)":94.53} -{"level_0":4,"index":9,"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":71.07,"BUCC (de-en)":99.5,"BUCC (fr-en)":99.09,"BUCC (ru-en)":97.84,"BUCC (zh-en)":99.23,"Tatoeba (afr-eng)":87.19,"Tatoeba (amh-eng)":22.05,"Tatoeba (ang-eng)":81.59,"Tatoeba (ara-eng)":88.76,"Tatoeba (arq-eng)":48.55,"Tatoeba (arz-eng)":70.73,"Tatoeba (ast-eng)":80.94,"Tatoeba (awa-eng)":67.45,"Tatoeba (aze-eng)":76.23,"Tatoeba (bel-eng)":88.09,"Tatoeba (ben-eng)":81.82,"Tatoeba (ber-eng)":7.62,"Tatoeba (bos-eng)":91.54,"Tatoeba (bre-eng)":14.2,"Tatoeba 
(bul-eng)":93.98,"Tatoeba (cat-eng)":91.89,"Tatoeba (cbk-eng)":81.32,"Tatoeba (ceb-eng)":43.15,"Tatoeba (ces-eng)":94.93,"Tatoeba (cha-eng)":44.8,"Tatoeba (cmn-eng)":95.9,"Tatoeba (cor-eng)":9.19,"Tatoeba (csb-eng)":62.92,"Tatoeba (cym-eng)":72.32,"Tatoeba (dan-eng)":93.92,"Tatoeba (deu-eng)":99.47,"Tatoeba (dsb-eng)":65.43,"Tatoeba (dtp-eng)":11.74,"Tatoeba (ell-eng)":91.42,"Tatoeba (epo-eng)":87.79,"Tatoeba (est-eng)":62.89,"Tatoeba (eus-eng)":40.51,"Tatoeba (fao-eng)":70.33,"Tatoeba (fin-eng)":90.69,"Tatoeba (fra-eng)":95.77,"Tatoeba (fry-eng)":70.98,"Tatoeba (gla-eng)":63.53,"Tatoeba (gle-eng)":73.81,"Tatoeba (glg-eng)":90.44,"Tatoeba (gsw-eng)":52.21,"Tatoeba (heb-eng)":82.82,"Tatoeba (hin-eng)":95.28,"Tatoeba (hrv-eng)":93.97,"Tatoeba (hsb-eng)":74.24,"Tatoeba (hun-eng)":89.82,"Tatoeba (hye-eng)":63.99,"Tatoeba (ido-eng)":75.11,"Tatoeba (ile-eng)":83.97,"Tatoeba (ina-eng)":95.52,"Tatoeba (ind-eng)":93.48,"Tatoeba (isl-eng)":87.57,"Tatoeba (ita-eng)":91.94,"Tatoeba (jav-eng)":38.49,"Tatoeba (jpn-eng)":94.0,"Tatoeba (kab-eng)":3.81,"Tatoeba (kat-eng)":59.67,"Tatoeba (kaz-eng)":46.89,"Tatoeba (khm-eng)":37.31,"Tatoeba (kor-eng)":91.29,"Tatoeba (kur-eng)":29.93,"Tatoeba (kzj-eng)":12.19,"Tatoeba (lat-eng)":87.94,"Tatoeba (lfn-eng)":73.89,"Tatoeba (lit-eng)":69.2,"Tatoeba (lvs-eng)":66.01,"Tatoeba (mal-eng)":55.86,"Tatoeba (mar-eng)":68.99,"Tatoeba (max-eng)":64.8,"Tatoeba (mhr-eng)":16.24,"Tatoeba (mkd-eng)":86.9,"Tatoeba (mon-eng)":37.79,"Tatoeba (nds-eng)":77.42,"Tatoeba (nld-eng)":96.4,"Tatoeba (nno-eng)":87.24,"Tatoeba (nob-eng)":96.86,"Tatoeba (nov-eng)":72.4,"Tatoeba (oci-eng)":62.2,"Tatoeba (orv-eng)":59.65,"Tatoeba (pam-eng)":14.02,"Tatoeba (pes-eng)":89.67,"Tatoeba (pms-eng)":62.3,"Tatoeba (pol-eng)":96.47,"Tatoeba (por-eng)":94.44,"Tatoeba (ron-eng)":93.0,"Tatoeba (rus-eng)":93.75,"Tatoeba (slk-eng)":88.84,"Tatoeba (slv-eng)":85.99,"Tatoeba (spa-eng)":98.8,"Tatoeba (sqi-eng)":67.06,"Tatoeba (srp-eng)":92.54,"Tatoeba (swe-eng)":92.95,"Tatoeba (swg-eng)":64.75,"Tatoeba (swh-eng)":61.6,"Tatoeba (tam-eng)":72.83,"Tatoeba (tat-eng)":37.02,"Tatoeba (tel-eng)":42.79,"Tatoeba (tgl-eng)":93.14,"Tatoeba (tha-eng)":93.64,"Tatoeba (tuk-eng)":42.01,"Tatoeba (tur-eng)":94.13,"Tatoeba (tzl-eng)":49.51,"Tatoeba (uig-eng)":38.86,"Tatoeba (ukr-eng)":94.45,"Tatoeba (urd-eng)":83.62,"Tatoeba (uzb-eng)":52.73,"Tatoeba (vie-eng)":94.83,"Tatoeba (war-eng)":41.97,"Tatoeba (wuu-eng)":89.61,"Tatoeba (xho-eng)":35.96,"Tatoeba (yid-eng)":33.12,"Tatoeba (yue-eng)":89.11,"Tatoeba (zsm-eng)":94.1} -{"level_0":5,"index":10,"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.09,"BUCC (de-en)":99.13,"BUCC (fr-en)":97.59,"BUCC (ru-en)":97.2,"BUCC (zh-en)":98.3,"Tatoeba (afr-eng)":86.91,"Tatoeba (amh-eng)":74.93,"Tatoeba (ang-eng)":29.07,"Tatoeba (ara-eng)":82.59,"Tatoeba (arq-eng)":26.74,"Tatoeba (arz-eng)":66.79,"Tatoeba (ast-eng)":75.31,"Tatoeba (awa-eng)":68.39,"Tatoeba (aze-eng)":84.71,"Tatoeba (bel-eng)":86.7,"Tatoeba (ben-eng)":81.05,"Tatoeba (ber-eng)":23.58,"Tatoeba (bos-eng)":88.86,"Tatoeba (bre-eng)":5.44,"Tatoeba (bul-eng)":88.63,"Tatoeba (cat-eng)":84.09,"Tatoeba (cbk-eng)":60.54,"Tatoeba (ceb-eng)":45.46,"Tatoeba (ces-eng)":88.88,"Tatoeba (cha-eng)":16.95,"Tatoeba (cmn-eng)":93.35,"Tatoeba (cor-eng)":4.38,"Tatoeba (csb-eng)":24.56,"Tatoeba (cym-eng)":65.22,"Tatoeba (dan-eng)":91.4,"Tatoeba (deu-eng)":97.07,"Tatoeba (dsb-eng)":34.33,"Tatoeba (dtp-eng)":5.13,"Tatoeba (ell-eng)":89.96,"Tatoeba (epo-eng)":92.07,"Tatoeba 
(est-eng)":70.51,"Tatoeba (eus-eng)":56.16,"Tatoeba (fao-eng)":64.72,"Tatoeba (fin-eng)":86.15,"Tatoeba (fra-eng)":92.76,"Tatoeba (fry-eng)":50.88,"Tatoeba (gla-eng)":43.08,"Tatoeba (gle-eng)":58.36,"Tatoeba (glg-eng)":82.79,"Tatoeba (gsw-eng)":43.53,"Tatoeba (heb-eng)":74.14,"Tatoeba (hin-eng)":93.0,"Tatoeba (hrv-eng)":92.5,"Tatoeba (hsb-eng)":40.35,"Tatoeba (hun-eng)":84.32,"Tatoeba (hye-eng)":85.91,"Tatoeba (ido-eng)":74.39,"Tatoeba (ile-eng)":72.43,"Tatoeba (ina-eng)":86.11,"Tatoeba (ind-eng)":90.26,"Tatoeba (isl-eng)":76.9,"Tatoeba (ita-eng)":90.61,"Tatoeba (jav-eng)":61.25,"Tatoeba (jpn-eng)":90.3,"Tatoeba (kab-eng)":21.98,"Tatoeba (kat-eng)":77.83,"Tatoeba (kaz-eng)":75.56,"Tatoeba (khm-eng)":47.26,"Tatoeba (kor-eng)":83.37,"Tatoeba (kur-eng)":52.96,"Tatoeba (kzj-eng)":6.2,"Tatoeba (lat-eng)":39.58,"Tatoeba (lfn-eng)":52.85,"Tatoeba (lit-eng)":75.53,"Tatoeba (lvs-eng)":76.66,"Tatoeba (mal-eng)":96.72,"Tatoeba (mar-eng)":86.62,"Tatoeba (max-eng)":52.39,"Tatoeba (mhr-eng)":5.52,"Tatoeba (mkd-eng)":73.76,"Tatoeba (mon-eng)":78.37,"Tatoeba (nds-eng)":53.86,"Tatoeba (nld-eng)":93.2,"Tatoeba (nno-eng)":82.56,"Tatoeba (nob-eng)":95.9,"Tatoeba (nov-eng)":66.83,"Tatoeba (oci-eng)":35.79,"Tatoeba (orv-eng)":16.0,"Tatoeba (pam-eng)":6.92,"Tatoeba (pes-eng)":87.08,"Tatoeba (pms-eng)":44.61,"Tatoeba (pol-eng)":94.82,"Tatoeba (por-eng)":92.74,"Tatoeba (ron-eng)":91.27,"Tatoeba (rus-eng)":91.78,"Tatoeba (slk-eng)":86.4,"Tatoeba (slv-eng)":81.93,"Tatoeba (spa-eng)":96.97,"Tatoeba (sqi-eng)":90.06,"Tatoeba (srp-eng)":89.08,"Tatoeba (swe-eng)":91.33,"Tatoeba (swg-eng)":42.33,"Tatoeba (swh-eng)":66.81,"Tatoeba (tam-eng)":85.12,"Tatoeba (tat-eng)":66.92,"Tatoeba (tel-eng)":88.49,"Tatoeba (tgl-eng)":83.78,"Tatoeba (tha-eng)":94.4,"Tatoeba (tuk-eng)":19.66,"Tatoeba (tur-eng)":92.67,"Tatoeba (tzl-eng)":34.44,"Tatoeba (uig-eng)":63.08,"Tatoeba (ukr-eng)":88.29,"Tatoeba (urd-eng)":86.2,"Tatoeba (uzb-eng)":62.63,"Tatoeba (vie-eng)":94.68,"Tatoeba (war-eng)":47.18,"Tatoeba (wuu-eng)":78.65,"Tatoeba (xho-eng)":73.24,"Tatoeba (yid-eng)":63.2,"Tatoeba (yue-eng)":80.66,"Tatoeba (zsm-eng)":92.45} -{"level_0":6,"index":0,"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":67.42,"BUCC (de-en)":99.21,"BUCC (fr-en)":98.39,"BUCC (ru-en)":97.62,"BUCC (zh-en)":97.7,"Tatoeba (afr-eng)":92.59,"Tatoeba (amh-eng)":80.82,"Tatoeba (ang-eng)":25.22,"Tatoeba (ara-eng)":90.14,"Tatoeba (arq-eng)":26.63,"Tatoeba (arz-eng)":66.16,"Tatoeba (ast-eng)":76.35,"Tatoeba (awa-eng)":33.74,"Tatoeba (aze-eng)":82.41,"Tatoeba (bel-eng)":79.54,"Tatoeba (ben-eng)":89.43,"Tatoeba (ber-eng)":77.63,"Tatoeba (bos-eng)":95.86,"Tatoeba (bre-eng)":31.2,"Tatoeba (bul-eng)":93.57,"Tatoeba (cat-eng)":95.8,"Tatoeba (cbk-eng)":77.17,"Tatoeba (ceb-eng)":9.93,"Tatoeba (ces-eng)":95.52,"Tatoeba (cha-eng)":14.86,"Tatoeba (cmn-eng)":85.62,"Tatoeba (cor-eng)":4.45,"Tatoeba (csb-eng)":27.03,"Tatoeba (cym-eng)":5.85,"Tatoeba (dan-eng)":95.22,"Tatoeba (deu-eng)":99.07,"Tatoeba (dsb-eng)":42.34,"Tatoeba (dtp-eng)":7.39,"Tatoeba (ell-eng)":96.2,"Tatoeba (epo-eng)":96.61,"Tatoeba (est-eng)":96.43,"Tatoeba (eus-eng)":93.32,"Tatoeba (fao-eng)":57.04,"Tatoeba (fin-eng)":96.98,"Tatoeba (fra-eng)":94.28,"Tatoeba (fry-eng)":42.07,"Tatoeba (gla-eng)":1.52,"Tatoeba (gle-eng)":4.2,"Tatoeba (glg-eng)":96.14,"Tatoeba (gsw-eng)":27.52,"Tatoeba (heb-eng)":0.0,"Tatoeba (hin-eng)":95.32,"Tatoeba (hrv-eng)":96.72,"Tatoeba (hsb-eng)":45.75,"Tatoeba (hun-eng)":95.2,"Tatoeba (hye-eng)":88.72,"Tatoeba (ido-eng)":80.86,"Tatoeba 
(ile-eng)":87.88,"Tatoeba (ina-eng)":93.93,"Tatoeba (ind-eng)":92.98,"Tatoeba (isl-eng)":94.32,"Tatoeba (ita-eng)":94.32,"Tatoeba (jav-eng)":9.95,"Tatoeba (jpn-eng)":93.78,"Tatoeba (kab-eng)":65.88,"Tatoeba (kat-eng)":81.16,"Tatoeba (kaz-eng)":53.3,"Tatoeba (khm-eng)":74.19,"Tatoeba (kor-eng)":87.97,"Tatoeba (kur-eng)":19.09,"Tatoeba (kzj-eng)":4.46,"Tatoeba (lat-eng)":64.81,"Tatoeba (lfn-eng)":63.39,"Tatoeba (lit-eng)":96.2,"Tatoeba (lvs-eng)":95.33,"Tatoeba (mal-eng)":98.16,"Tatoeba (mar-eng)":92.93,"Tatoeba (max-eng)":36.96,"Tatoeba (mhr-eng)":6.86,"Tatoeba (mkd-eng)":93.63,"Tatoeba (mon-eng)":3.42,"Tatoeba (nds-eng)":77.13,"Tatoeba (nld-eng)":95.35,"Tatoeba (nno-eng)":72.75,"Tatoeba (nob-eng)":95.77,"Tatoeba (nov-eng)":60.02,"Tatoeba (oci-eng)":58.13,"Tatoeba (orv-eng)":23.24,"Tatoeba (pam-eng)":3.24,"Tatoeba (pes-eng)":93.13,"Tatoeba (pms-eng)":36.23,"Tatoeba (pol-eng)":97.32,"Tatoeba (por-eng)":94.54,"Tatoeba (ron-eng)":96.52,"Tatoeba (rus-eng)":92.58,"Tatoeba (slk-eng)":95.82,"Tatoeba (slv-eng)":95.4,"Tatoeba (spa-eng)":97.33,"Tatoeba (sqi-eng)":97.22,"Tatoeba (srp-eng)":93.64,"Tatoeba (swe-eng)":95.31,"Tatoeba (swg-eng)":33.1,"Tatoeba (swh-eng)":55.66,"Tatoeba (tam-eng)":87.32,"Tatoeba (tat-eng)":34.74,"Tatoeba (tel-eng)":96.72,"Tatoeba (tgl-eng)":63.19,"Tatoeba (tha-eng)":96.38,"Tatoeba (tuk-eng)":16.35,"Tatoeba (tur-eng)":98.03,"Tatoeba (tzl-eng)":36.56,"Tatoeba (uig-eng)":56.49,"Tatoeba (ukr-eng)":93.52,"Tatoeba (urd-eng)":84.23,"Tatoeba (uzb-eng)":23.2,"Tatoeba (vie-eng)":96.73,"Tatoeba (war-eng)":8.25,"Tatoeba (wuu-eng)":75.09,"Tatoeba (xho-eng)":4.68,"Tatoeba (yid-eng)":2.49,"Tatoeba (yue-eng)":87.75,"Tatoeba (zsm-eng)":95.41} -{"level_0":7,"index":13,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.2,"BUCC (de-en)":97.86,"BUCC (fr-en)":92.66,"BUCC (ru-en)":93.5,"BUCC (zh-en)":88.79,"Tatoeba (afr-eng)":85.67,"Tatoeba (amh-eng)":76.13,"Tatoeba (ang-eng)":42.23,"Tatoeba (ara-eng)":72.55,"Tatoeba (arq-eng)":22.43,"Tatoeba (arz-eng)":55.14,"Tatoeba (ast-eng)":71.67,"Tatoeba (awa-eng)":74.27,"Tatoeba (aze-eng)":80.14,"Tatoeba (bel-eng)":84.68,"Tatoeba (ben-eng)":77.76,"Tatoeba (ber-eng)":16.92,"Tatoeba (bos-eng)":83.24,"Tatoeba (bre-eng)":8.24,"Tatoeba (bul-eng)":83.81,"Tatoeba (cat-eng)":79.75,"Tatoeba (cbk-eng)":57.77,"Tatoeba (ceb-eng)":42.01,"Tatoeba (ces-eng)":78.83,"Tatoeba (cha-eng)":27.83,"Tatoeba (cmn-eng)":88.89,"Tatoeba (cor-eng)":6.02,"Tatoeba (csb-eng)":23.99,"Tatoeba (cym-eng)":65.32,"Tatoeba (dan-eng)":84.39,"Tatoeba (deu-eng)":96.58,"Tatoeba (dsb-eng)":31.64,"Tatoeba (dtp-eng)":7.68,"Tatoeba (ell-eng)":84.08,"Tatoeba (epo-eng)":90.5,"Tatoeba (est-eng)":55.31,"Tatoeba (eus-eng)":54.47,"Tatoeba (fao-eng)":58.87,"Tatoeba (fin-eng)":68.56,"Tatoeba (fra-eng)":89.93,"Tatoeba (fry-eng)":51.03,"Tatoeba (gla-eng)":39.42,"Tatoeba (gle-eng)":59.59,"Tatoeba (glg-eng)":81.93,"Tatoeba (gsw-eng)":36.45,"Tatoeba (heb-eng)":67.92,"Tatoeba (hin-eng)":93.25,"Tatoeba (hrv-eng)":86.87,"Tatoeba (hsb-eng)":37.02,"Tatoeba (hun-eng)":71.84,"Tatoeba (hye-eng)":82.29,"Tatoeba (ido-eng)":70.86,"Tatoeba (ile-eng)":71.73,"Tatoeba (ina-eng)":85.6,"Tatoeba (ind-eng)":87.81,"Tatoeba (isl-eng)":62.16,"Tatoeba (ita-eng)":87.56,"Tatoeba (jav-eng)":50.66,"Tatoeba (jpn-eng)":83.7,"Tatoeba (kab-eng)":17.83,"Tatoeba (kat-eng)":76.84,"Tatoeba (kaz-eng)":73.03,"Tatoeba (khm-eng)":47.99,"Tatoeba (kor-eng)":74.16,"Tatoeba (kur-eng)":39.67,"Tatoeba (kzj-eng)":8.27,"Tatoeba (lat-eng)":39.95,"Tatoeba (lfn-eng)":52.73,"Tatoeba 
(lit-eng)":61.52,"Tatoeba (lvs-eng)":60.67,"Tatoeba (mal-eng)":94.78,"Tatoeba (mar-eng)":86.79,"Tatoeba (max-eng)":49.95,"Tatoeba (mhr-eng)":6.08,"Tatoeba (mkd-eng)":59.26,"Tatoeba (mon-eng)":78.22,"Tatoeba (nds-eng)":53.26,"Tatoeba (nld-eng)":90.3,"Tatoeba (nno-eng)":69.48,"Tatoeba (nob-eng)":87.8,"Tatoeba (nov-eng)":68.56,"Tatoeba (oci-eng)":42.84,"Tatoeba (orv-eng)":15.13,"Tatoeba (pam-eng)":6.97,"Tatoeba (pes-eng)":84.0,"Tatoeba (pms-eng)":42.15,"Tatoeba (pol-eng)":85.91,"Tatoeba (por-eng)":89.25,"Tatoeba (ron-eng)":83.67,"Tatoeba (rus-eng)":87.96,"Tatoeba (slk-eng)":79.96,"Tatoeba (slv-eng)":72.85,"Tatoeba (spa-eng)":91.1,"Tatoeba (sqi-eng)":88.56,"Tatoeba (srp-eng)":80.33,"Tatoeba (swe-eng)":84.97,"Tatoeba (swg-eng)":44.3,"Tatoeba (swh-eng)":66.74,"Tatoeba (tam-eng)":81.39,"Tatoeba (tat-eng)":63.38,"Tatoeba (tel-eng)":85.78,"Tatoeba (tgl-eng)":79.26,"Tatoeba (tha-eng)":89.36,"Tatoeba (tuk-eng)":18.41,"Tatoeba (tur-eng)":86.78,"Tatoeba (tzl-eng)":36.2,"Tatoeba (uig-eng)":59.33,"Tatoeba (ukr-eng)":81.62,"Tatoeba (urd-eng)":84.71,"Tatoeba (uzb-eng)":58.0,"Tatoeba (vie-eng)":87.78,"Tatoeba (war-eng)":40.86,"Tatoeba (wuu-eng)":69.05,"Tatoeba (xho-eng)":62.84,"Tatoeba (yid-eng)":66.83,"Tatoeba (yue-eng)":69.69,"Tatoeba (zsm-eng)":90.0} -{"level_0":8,"index":24,"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":63.38,"BUCC (de-en)":98.59,"BUCC (fr-en)":96.89,"BUCC (ru-en)":96.44,"BUCC (zh-en)":97.56,"Tatoeba (afr-eng)":72.96,"Tatoeba (amh-eng)":53.49,"Tatoeba (ang-eng)":16.72,"Tatoeba (ara-eng)":90.19,"Tatoeba (arq-eng)":19.84,"Tatoeba (arz-eng)":55.69,"Tatoeba (ast-eng)":70.08,"Tatoeba (awa-eng)":42.83,"Tatoeba (aze-eng)":76.36,"Tatoeba (bel-eng)":79.94,"Tatoeba (ben-eng)":64.9,"Tatoeba (ber-eng)":4.88,"Tatoeba (bos-eng)":94.02,"Tatoeba (bre-eng)":6.42,"Tatoeba (bul-eng)":93.52,"Tatoeba (cat-eng)":96.05,"Tatoeba (cbk-eng)":58.68,"Tatoeba (ceb-eng)":7.39,"Tatoeba (ces-eng)":95.73,"Tatoeba (cha-eng)":12.59,"Tatoeba (cmn-eng)":95.83,"Tatoeba (cor-eng)":3.53,"Tatoeba (csb-eng)":23.73,"Tatoeba (cym-eng)":22.31,"Tatoeba (dan-eng)":96.17,"Tatoeba (deu-eng)":97.73,"Tatoeba (dsb-eng)":36.85,"Tatoeba (dtp-eng)":5.03,"Tatoeba (ell-eng)":94.93,"Tatoeba (epo-eng)":55.12,"Tatoeba (est-eng)":98.4,"Tatoeba (eus-eng)":31.33,"Tatoeba (fao-eng)":38.24,"Tatoeba (fin-eng)":95.92,"Tatoeba (fra-eng)":93.12,"Tatoeba (fry-eng)":43.54,"Tatoeba (gla-eng)":4.72,"Tatoeba (gle-eng)":16.85,"Tatoeba (glg-eng)":95.32,"Tatoeba (gsw-eng)":25.12,"Tatoeba (heb-eng)":88.26,"Tatoeba (hin-eng)":97.75,"Tatoeba (hrv-eng)":97.0,"Tatoeba (hsb-eng)":44.32,"Tatoeba (hun-eng)":94.18,"Tatoeba (hye-eng)":94.38,"Tatoeba (ido-eng)":43.91,"Tatoeba (ile-eng)":60.36,"Tatoeba (ina-eng)":84.32,"Tatoeba (ind-eng)":93.5,"Tatoeba (isl-eng)":59.25,"Tatoeba (ita-eng)":93.76,"Tatoeba (jav-eng)":23.39,"Tatoeba (jpn-eng)":92.51,"Tatoeba (kab-eng)":1.41,"Tatoeba (kat-eng)":95.46,"Tatoeba (kaz-eng)":61.49,"Tatoeba (khm-eng)":58.8,"Tatoeba (kor-eng)":93.07,"Tatoeba (kur-eng)":61.44,"Tatoeba (kzj-eng)":5.88,"Tatoeba (lat-eng)":24.25,"Tatoeba (lfn-eng)":49.56,"Tatoeba (lit-eng)":95.37,"Tatoeba (lvs-eng)":97.53,"Tatoeba (mal-eng)":88.46,"Tatoeba (mar-eng)":93.83,"Tatoeba (max-eng)":48.77,"Tatoeba (mhr-eng)":7.57,"Tatoeba (mkd-eng)":93.02,"Tatoeba (mon-eng)":96.14,"Tatoeba (nds-eng)":38.88,"Tatoeba (nld-eng)":95.5,"Tatoeba (nno-eng)":81.41,"Tatoeba (nob-eng)":98.53,"Tatoeba (nov-eng)":50.23,"Tatoeba (oci-eng)":43.49,"Tatoeba (orv-eng)":23.77,"Tatoeba 
(pam-eng)":5.39,"Tatoeba (pes-eng)":93.47,"Tatoeba (pms-eng)":34.19,"Tatoeba (pol-eng)":96.95,"Tatoeba (por-eng)":93.02,"Tatoeba (ron-eng)":96.43,"Tatoeba (rus-eng)":92.92,"Tatoeba (slk-eng)":96.62,"Tatoeba (slv-eng)":97.08,"Tatoeba (spa-eng)":97.0,"Tatoeba (sqi-eng)":98.57,"Tatoeba (srp-eng)":94.12,"Tatoeba (swe-eng)":95.45,"Tatoeba (swg-eng)":22.8,"Tatoeba (swh-eng)":16.02,"Tatoeba (tam-eng)":73.6,"Tatoeba (tat-eng)":10.89,"Tatoeba (tel-eng)":79.73,"Tatoeba (tgl-eng)":17.67,"Tatoeba (tha-eng)":95.99,"Tatoeba (tuk-eng)":14.91,"Tatoeba (tur-eng)":96.17,"Tatoeba (tzl-eng)":34.21,"Tatoeba (uig-eng)":48.35,"Tatoeba (ukr-eng)":92.67,"Tatoeba (urd-eng)":95.12,"Tatoeba (uzb-eng)":23.19,"Tatoeba (vie-eng)":97.23,"Tatoeba (war-eng)":7.42,"Tatoeba (wuu-eng)":78.25,"Tatoeba (xho-eng)":6.53,"Tatoeba (yid-eng)":30.73,"Tatoeba (yue-eng)":77.58,"Tatoeba (zsm-eng)":95.8} -{"level_0":9,"index":23,"Rank":10,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":57.98,"BUCC (de-en)":97.11,"BUCC (fr-en)":94.99,"BUCC (ru-en)":95.06,"BUCC (zh-en)":95.63,"Tatoeba (afr-eng)":58.22,"Tatoeba (amh-eng)":36.21,"Tatoeba (ang-eng)":10.24,"Tatoeba (ara-eng)":87.93,"Tatoeba (arq-eng)":18.6,"Tatoeba (arz-eng)":51.26,"Tatoeba (ast-eng)":62.17,"Tatoeba (awa-eng)":33.43,"Tatoeba (aze-eng)":62.1,"Tatoeba (bel-eng)":67.73,"Tatoeba (ben-eng)":36.48,"Tatoeba (ber-eng)":4.43,"Tatoeba (bos-eng)":93.27,"Tatoeba (bre-eng)":5.56,"Tatoeba (bul-eng)":92.65,"Tatoeba (cat-eng)":94.42,"Tatoeba (cbk-eng)":55.37,"Tatoeba (ceb-eng)":8.05,"Tatoeba (ces-eng)":95.12,"Tatoeba (cha-eng)":15.98,"Tatoeba (cmn-eng)":94.93,"Tatoeba (cor-eng)":3.42,"Tatoeba (csb-eng)":21.56,"Tatoeba (cym-eng)":13.25,"Tatoeba (dan-eng)":94.8,"Tatoeba (deu-eng)":97.02,"Tatoeba (dsb-eng)":33.43,"Tatoeba (dtp-eng)":5.69,"Tatoeba (ell-eng)":95.43,"Tatoeba (epo-eng)":41.73,"Tatoeba (est-eng)":97.33,"Tatoeba (eus-eng)":23.18,"Tatoeba (fao-eng)":27.51,"Tatoeba (fin-eng)":93.1,"Tatoeba (fra-eng)":91.72,"Tatoeba (fry-eng)":31.13,"Tatoeba (gla-eng)":3.61,"Tatoeba (gle-eng)":11.62,"Tatoeba (glg-eng)":94.0,"Tatoeba (gsw-eng)":25.74,"Tatoeba (heb-eng)":86.88,"Tatoeba (hin-eng)":97.62,"Tatoeba (hrv-eng)":95.98,"Tatoeba (hsb-eng)":36.1,"Tatoeba (hun-eng)":91.58,"Tatoeba (hye-eng)":93.28,"Tatoeba (ido-eng)":40.25,"Tatoeba (ile-eng)":57.71,"Tatoeba (ina-eng)":79.13,"Tatoeba (ind-eng)":92.74,"Tatoeba (isl-eng)":24.07,"Tatoeba (ita-eng)":93.05,"Tatoeba (jav-eng)":17.04,"Tatoeba (jpn-eng)":90.41,"Tatoeba (kab-eng)":1.16,"Tatoeba (kat-eng)":95.44,"Tatoeba (kaz-eng)":34.89,"Tatoeba (khm-eng)":32.11,"Tatoeba (kor-eng)":92.52,"Tatoeba (kur-eng)":46.94,"Tatoeba (kzj-eng)":6.24,"Tatoeba (lat-eng)":19.47,"Tatoeba (lfn-eng)":47.02,"Tatoeba (lit-eng)":93.16,"Tatoeba (lvs-eng)":97.87,"Tatoeba (mal-eng)":32.2,"Tatoeba (mar-eng)":92.38,"Tatoeba (max-eng)":45.25,"Tatoeba (mhr-eng)":6.89,"Tatoeba (mkd-eng)":91.0,"Tatoeba (mon-eng)":95.04,"Tatoeba (nds-eng)":32.16,"Tatoeba (nld-eng)":94.58,"Tatoeba (nno-eng)":76.34,"Tatoeba (nob-eng)":97.73,"Tatoeba (nov-eng)":47.99,"Tatoeba (oci-eng)":38.57,"Tatoeba (orv-eng)":15.1,"Tatoeba (pam-eng)":5.41,"Tatoeba (pes-eng)":92.59,"Tatoeba (pms-eng)":30.7,"Tatoeba (pol-eng)":94.28,"Tatoeba (por-eng)":92.13,"Tatoeba (ron-eng)":95.3,"Tatoeba (rus-eng)":91.87,"Tatoeba (slk-eng)":95.15,"Tatoeba (slv-eng)":96.92,"Tatoeba (spa-eng)":95.42,"Tatoeba (sqi-eng)":98.17,"Tatoeba (srp-eng)":92.24,"Tatoeba (swe-eng)":94.42,"Tatoeba (swg-eng)":26.31,"Tatoeba (swh-eng)":14.48,"Tatoeba 
(tam-eng)":24.64,"Tatoeba (tat-eng)":10.25,"Tatoeba (tel-eng)":36.4,"Tatoeba (tgl-eng)":13.09,"Tatoeba (tha-eng)":96.72,"Tatoeba (tuk-eng)":15.16,"Tatoeba (tur-eng)":95.08,"Tatoeba (tzl-eng)":25.46,"Tatoeba (uig-eng)":24.39,"Tatoeba (ukr-eng)":92.82,"Tatoeba (urd-eng)":94.57,"Tatoeba (uzb-eng)":17.14,"Tatoeba (vie-eng)":95.12,"Tatoeba (war-eng)":7.25,"Tatoeba (wuu-eng)":76.0,"Tatoeba (xho-eng)":4.52,"Tatoeba (yid-eng)":14.38,"Tatoeba (yue-eng)":71.45,"Tatoeba (zsm-eng)":95.31} -{"level_0":10,"index":5,"Rank":11,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.27,"BUCC (de-en)":98.18,"BUCC (fr-en)":98.04,"BUCC (ru-en)":94.58,"BUCC (zh-en)":96.75,"Tatoeba (afr-eng)":50.58,"Tatoeba (amh-eng)":0.36,"Tatoeba (ang-eng)":30.89,"Tatoeba (ara-eng)":79.65,"Tatoeba (arq-eng)":20.68,"Tatoeba (arz-eng)":56.04,"Tatoeba (ast-eng)":55.81,"Tatoeba (awa-eng)":41.58,"Tatoeba (aze-eng)":27.78,"Tatoeba (bel-eng)":41.21,"Tatoeba (ben-eng)":20.85,"Tatoeba (ber-eng)":5.5,"Tatoeba (bos-eng)":36.73,"Tatoeba (bre-eng)":5.46,"Tatoeba (bul-eng)":60.26,"Tatoeba (cat-eng)":63.42,"Tatoeba (cbk-eng)":56.73,"Tatoeba (ceb-eng)":10.46,"Tatoeba (ces-eng)":81.81,"Tatoeba (cha-eng)":22.8,"Tatoeba (cmn-eng)":86.13,"Tatoeba (cor-eng)":3.87,"Tatoeba (csb-eng)":20.52,"Tatoeba (cym-eng)":9.1,"Tatoeba (dan-eng)":72.49,"Tatoeba (deu-eng)":96.57,"Tatoeba (dsb-eng)":28.3,"Tatoeba (dtp-eng)":3.87,"Tatoeba (ell-eng)":10.43,"Tatoeba (epo-eng)":30.54,"Tatoeba (est-eng)":7.43,"Tatoeba (eus-eng)":11.67,"Tatoeba (fao-eng)":21.34,"Tatoeba (fin-eng)":18.11,"Tatoeba (fra-eng)":89.39,"Tatoeba (fry-eng)":37.73,"Tatoeba (gla-eng)":4.23,"Tatoeba (gle-eng)":4.4,"Tatoeba (glg-eng)":74.27,"Tatoeba (gsw-eng)":37.04,"Tatoeba (heb-eng)":72.4,"Tatoeba (hin-eng)":82.06,"Tatoeba (hrv-eng)":34.63,"Tatoeba (hsb-eng)":37.13,"Tatoeba (hun-eng)":13.18,"Tatoeba (hye-eng)":0.37,"Tatoeba (ido-eng)":48.29,"Tatoeba (ile-eng)":61.81,"Tatoeba (ina-eng)":78.25,"Tatoeba (ind-eng)":54.64,"Tatoeba (isl-eng)":12.45,"Tatoeba (ita-eng)":81.99,"Tatoeba (jav-eng)":9.0,"Tatoeba (jpn-eng)":86.92,"Tatoeba (kab-eng)":1.95,"Tatoeba (kat-eng)":0.76,"Tatoeba (kaz-eng)":9.15,"Tatoeba (khm-eng)":0.35,"Tatoeba (kor-eng)":75.76,"Tatoeba (kur-eng)":13.51,"Tatoeba (kzj-eng)":5.8,"Tatoeba (lat-eng)":21.15,"Tatoeba (lfn-eng)":47.49,"Tatoeba (lit-eng)":10.45,"Tatoeba (lvs-eng)":10.72,"Tatoeba (mal-eng)":74.47,"Tatoeba (mar-eng)":67.35,"Tatoeba (max-eng)":22.52,"Tatoeba (mhr-eng)":4.3,"Tatoeba (mkd-eng)":34.72,"Tatoeba (mon-eng)":10.95,"Tatoeba (nds-eng)":49.47,"Tatoeba (nld-eng)":87.22,"Tatoeba (nno-eng)":56.01,"Tatoeba (nob-eng)":79.21,"Tatoeba (nov-eng)":54.06,"Tatoeba (oci-eng)":39.12,"Tatoeba (orv-eng)":19.65,"Tatoeba (pam-eng)":4.84,"Tatoeba (pes-eng)":45.04,"Tatoeba (pms-eng)":34.32,"Tatoeba (pol-eng)":83.35,"Tatoeba (por-eng)":89.38,"Tatoeba (ron-eng)":59.11,"Tatoeba (rus-eng)":84.96,"Tatoeba (slk-eng)":65.42,"Tatoeba (slv-eng)":35.88,"Tatoeba (spa-eng)":93.12,"Tatoeba (sqi-eng)":15.39,"Tatoeba (srp-eng)":35.41,"Tatoeba (swe-eng)":86.33,"Tatoeba (swg-eng)":36.39,"Tatoeba (swh-eng)":7.78,"Tatoeba (tam-eng)":65.79,"Tatoeba (tat-eng)":6.67,"Tatoeba (tel-eng)":65.37,"Tatoeba (tgl-eng)":17.86,"Tatoeba (tha-eng)":3.16,"Tatoeba (tuk-eng)":9.61,"Tatoeba (tur-eng)":77.38,"Tatoeba (tzl-eng)":26.1,"Tatoeba (uig-eng)":1.31,"Tatoeba (ukr-eng)":65.33,"Tatoeba (urd-eng)":6.6,"Tatoeba (uzb-eng)":7.86,"Tatoeba (vie-eng)":50.44,"Tatoeba (war-eng)":10.67,"Tatoeba (wuu-eng)":70.58,"Tatoeba (xho-eng)":9.53,"Tatoeba (yid-eng)":1.46,"Tatoeba 
(yue-eng)":51.32,"Tatoeba (zsm-eng)":54.61} -{"level_0":11,"index":17,"Rank":12,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.12,"BUCC (de-en)":87.71,"BUCC (fr-en)":98.41,"BUCC (ru-en)":78.75,"BUCC (zh-en)":98.77,"Tatoeba (afr-eng)":22.56,"Tatoeba (amh-eng)":0.1,"Tatoeba (ang-eng)":34.68,"Tatoeba (ara-eng)":90.62,"Tatoeba (arq-eng)":37.21,"Tatoeba (arz-eng)":74.8,"Tatoeba (ast-eng)":68.37,"Tatoeba (awa-eng)":53.01,"Tatoeba (aze-eng)":8.16,"Tatoeba (bel-eng)":18.49,"Tatoeba (ben-eng)":83.89,"Tatoeba (ber-eng)":5.17,"Tatoeba (bos-eng)":19.28,"Tatoeba (bre-eng)":7.41,"Tatoeba (bul-eng)":28.46,"Tatoeba (cat-eng)":90.24,"Tatoeba (cbk-eng)":61.49,"Tatoeba (ceb-eng)":9.08,"Tatoeba (ces-eng)":12.99,"Tatoeba (cha-eng)":26.96,"Tatoeba (cmn-eng)":95.7,"Tatoeba (cor-eng)":3.86,"Tatoeba (csb-eng)":11.04,"Tatoeba (cym-eng)":8.32,"Tatoeba (dan-eng)":33.6,"Tatoeba (deu-eng)":85.06,"Tatoeba (dsb-eng)":15.0,"Tatoeba (dtp-eng)":5.26,"Tatoeba (ell-eng)":13.08,"Tatoeba (epo-eng)":33.92,"Tatoeba (est-eng)":5.61,"Tatoeba (eus-eng)":74.53,"Tatoeba (fao-eng)":14.42,"Tatoeba (fin-eng)":5.48,"Tatoeba (fra-eng)":94.32,"Tatoeba (fry-eng)":32.37,"Tatoeba (gla-eng)":3.1,"Tatoeba (gle-eng)":3.91,"Tatoeba (glg-eng)":82.55,"Tatoeba (gsw-eng)":23.7,"Tatoeba (heb-eng)":7.49,"Tatoeba (hin-eng)":94.88,"Tatoeba (hrv-eng)":17.75,"Tatoeba (hsb-eng)":13.31,"Tatoeba (hun-eng)":6.65,"Tatoeba (hye-eng)":0.92,"Tatoeba (ido-eng)":47.84,"Tatoeba (ile-eng)":63.65,"Tatoeba (ina-eng)":83.73,"Tatoeba (ind-eng)":91.91,"Tatoeba (isl-eng)":8.0,"Tatoeba (ita-eng)":74.96,"Tatoeba (jav-eng)":22.26,"Tatoeba (jpn-eng)":83.88,"Tatoeba (kab-eng)":1.54,"Tatoeba (kat-eng)":1.48,"Tatoeba (kaz-eng)":9.66,"Tatoeba (khm-eng)":0.7,"Tatoeba (kor-eng)":44.79,"Tatoeba (kur-eng)":12.5,"Tatoeba (kzj-eng)":6.3,"Tatoeba (lat-eng)":46.75,"Tatoeba (lfn-eng)":46.48,"Tatoeba (lit-eng)":5.87,"Tatoeba (lvs-eng)":8.58,"Tatoeba (mal-eng)":95.05,"Tatoeba (mar-eng)":71.56,"Tatoeba (max-eng)":41.45,"Tatoeba (mhr-eng)":3.25,"Tatoeba (mkd-eng)":15.58,"Tatoeba (mon-eng)":7.37,"Tatoeba (nds-eng)":29.88,"Tatoeba (nld-eng)":49.85,"Tatoeba (nno-eng)":20.79,"Tatoeba (nob-eng)":31.94,"Tatoeba (nov-eng)":54.93,"Tatoeba (oci-eng)":45.74,"Tatoeba (orv-eng)":8.44,"Tatoeba (pam-eng)":7.03,"Tatoeba (pes-eng)":21.59,"Tatoeba (pms-eng)":36.58,"Tatoeba (pol-eng)":20.27,"Tatoeba (por-eng)":94.43,"Tatoeba (ron-eng)":33.85,"Tatoeba (rus-eng)":71.23,"Tatoeba (slk-eng)":12.3,"Tatoeba (slv-eng)":13.06,"Tatoeba (spa-eng)":98.13,"Tatoeba (sqi-eng)":15.33,"Tatoeba (srp-eng)":18.75,"Tatoeba (swe-eng)":27.82,"Tatoeba (swg-eng)":29.18,"Tatoeba (swh-eng)":27.81,"Tatoeba (tam-eng)":81.87,"Tatoeba (tat-eng)":4.96,"Tatoeba (tel-eng)":80.56,"Tatoeba (tgl-eng)":11.19,"Tatoeba (tha-eng)":6.78,"Tatoeba (tuk-eng)":6.85,"Tatoeba (tur-eng)":8.96,"Tatoeba (tzl-eng)":32.7,"Tatoeba (uig-eng)":2.02,"Tatoeba (ukr-eng)":31.12,"Tatoeba (urd-eng)":82.87,"Tatoeba (uzb-eng)":8.28,"Tatoeba (vie-eng)":95.83,"Tatoeba (war-eng)":11.23,"Tatoeba (wuu-eng)":87.91,"Tatoeba (xho-eng)":7.96,"Tatoeba (yid-eng)":0.49,"Tatoeba (yue-eng)":89.39,"Tatoeba (zsm-eng)":88.11} -{"level_0":12,"index":15,"Rank":13,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.17,"BUCC (de-en)":71.06,"BUCC (fr-en)":98.17,"BUCC (ru-en)":58.45,"BUCC (zh-en)":98.6,"Tatoeba (afr-eng)":18.66,"Tatoeba (amh-eng)":0.46,"Tatoeba (ang-eng)":27.44,"Tatoeba (ara-eng)":88.21,"Tatoeba (arq-eng)":29.24,"Tatoeba (arz-eng)":73.17,"Tatoeba 
(ast-eng)":71.25,"Tatoeba (awa-eng)":46.87,"Tatoeba (aze-eng)":7.61,"Tatoeba (bel-eng)":12.17,"Tatoeba (ben-eng)":80.65,"Tatoeba (ber-eng)":5.53,"Tatoeba (bos-eng)":14.99,"Tatoeba (bre-eng)":6.14,"Tatoeba (bul-eng)":26.9,"Tatoeba (cat-eng)":91.17,"Tatoeba (cbk-eng)":63.69,"Tatoeba (ceb-eng)":8.15,"Tatoeba (ces-eng)":8.29,"Tatoeba (cha-eng)":22.58,"Tatoeba (cmn-eng)":95.08,"Tatoeba (cor-eng)":4.0,"Tatoeba (csb-eng)":10.26,"Tatoeba (cym-eng)":8.46,"Tatoeba (dan-eng)":24.91,"Tatoeba (deu-eng)":72.73,"Tatoeba (dsb-eng)":12.44,"Tatoeba (dtp-eng)":5.41,"Tatoeba (ell-eng)":6.72,"Tatoeba (epo-eng)":28.07,"Tatoeba (est-eng)":5.63,"Tatoeba (eus-eng)":63.81,"Tatoeba (fao-eng)":12.9,"Tatoeba (fin-eng)":5.83,"Tatoeba (fra-eng)":93.53,"Tatoeba (fry-eng)":29.09,"Tatoeba (gla-eng)":4.13,"Tatoeba (gle-eng)":4.53,"Tatoeba (glg-eng)":80.98,"Tatoeba (gsw-eng)":19.31,"Tatoeba (heb-eng)":1.92,"Tatoeba (hin-eng)":92.87,"Tatoeba (hrv-eng)":13.69,"Tatoeba (hsb-eng)":12.35,"Tatoeba (hun-eng)":5.57,"Tatoeba (hye-eng)":1.56,"Tatoeba (ido-eng)":45.82,"Tatoeba (ile-eng)":64.41,"Tatoeba (ina-eng)":83.88,"Tatoeba (ind-eng)":90.54,"Tatoeba (isl-eng)":6.93,"Tatoeba (ita-eng)":73.39,"Tatoeba (jav-eng)":19.8,"Tatoeba (jpn-eng)":73.95,"Tatoeba (kab-eng)":1.92,"Tatoeba (kat-eng)":0.71,"Tatoeba (kaz-eng)":6.75,"Tatoeba (khm-eng)":0.09,"Tatoeba (kor-eng)":29.16,"Tatoeba (kur-eng)":11.84,"Tatoeba (kzj-eng)":5.99,"Tatoeba (lat-eng)":38.34,"Tatoeba (lfn-eng)":48.25,"Tatoeba (lit-eng)":5.82,"Tatoeba (lvs-eng)":6.95,"Tatoeba (mal-eng)":93.12,"Tatoeba (mar-eng)":65.18,"Tatoeba (max-eng)":40.88,"Tatoeba (mhr-eng)":2.88,"Tatoeba (mkd-eng)":13.93,"Tatoeba (mon-eng)":4.48,"Tatoeba (nds-eng)":24.45,"Tatoeba (nld-eng)":36.81,"Tatoeba (nno-eng)":16.07,"Tatoeba (nob-eng)":22.79,"Tatoeba (nov-eng)":53.3,"Tatoeba (oci-eng)":41.08,"Tatoeba (orv-eng)":5.61,"Tatoeba (pam-eng)":7.18,"Tatoeba (pes-eng)":17.01,"Tatoeba (pms-eng)":32.19,"Tatoeba (pol-eng)":14.83,"Tatoeba (por-eng)":93.62,"Tatoeba (ron-eng)":30.35,"Tatoeba (rus-eng)":62.61,"Tatoeba (slk-eng)":11.36,"Tatoeba (slv-eng)":10.89,"Tatoeba (spa-eng)":97.77,"Tatoeba (sqi-eng)":13.17,"Tatoeba (srp-eng)":15.09,"Tatoeba (swe-eng)":21.73,"Tatoeba (swg-eng)":22.77,"Tatoeba (swh-eng)":25.43,"Tatoeba (tam-eng)":84.66,"Tatoeba (tat-eng)":4.92,"Tatoeba (tel-eng)":79.3,"Tatoeba (tgl-eng)":10.75,"Tatoeba (tha-eng)":3.08,"Tatoeba (tuk-eng)":5.83,"Tatoeba (tur-eng)":7.14,"Tatoeba (tzl-eng)":31.64,"Tatoeba (uig-eng)":1.0,"Tatoeba (ukr-eng)":25.42,"Tatoeba (urd-eng)":76.77,"Tatoeba (uzb-eng)":5.91,"Tatoeba (vie-eng)":96.05,"Tatoeba (war-eng)":10.73,"Tatoeba (wuu-eng)":84.6,"Tatoeba (xho-eng)":7.79,"Tatoeba (yid-eng)":0.28,"Tatoeba (yue-eng)":86.38,"Tatoeba (zsm-eng)":85.22} -{"level_0":13,"index":7,"Rank":14,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.08,"BUCC (de-en)":54.0,"BUCC (fr-en)":97.06,"BUCC (ru-en)":45.3,"BUCC (zh-en)":97.96,"Tatoeba (afr-eng)":16.62,"Tatoeba (amh-eng)":0.03,"Tatoeba (ang-eng)":28.76,"Tatoeba (ara-eng)":85.37,"Tatoeba (arq-eng)":27.75,"Tatoeba (arz-eng)":70.66,"Tatoeba (ast-eng)":71.13,"Tatoeba (awa-eng)":35.01,"Tatoeba (aze-eng)":6.32,"Tatoeba (bel-eng)":8.03,"Tatoeba (ben-eng)":75.98,"Tatoeba (ber-eng)":4.92,"Tatoeba (bos-eng)":13.65,"Tatoeba (bre-eng)":4.67,"Tatoeba (bul-eng)":20.09,"Tatoeba (cat-eng)":88.31,"Tatoeba (cbk-eng)":64.63,"Tatoeba (ceb-eng)":6.64,"Tatoeba (ces-eng)":9.55,"Tatoeba (cha-eng)":23.26,"Tatoeba (cmn-eng)":91.45,"Tatoeba (cor-eng)":2.83,"Tatoeba (csb-eng)":10.03,"Tatoeba 
(cym-eng)":6.97,"Tatoeba (dan-eng)":23.52,"Tatoeba (deu-eng)":70.1,"Tatoeba (dsb-eng)":8.78,"Tatoeba (dtp-eng)":3.41,"Tatoeba (ell-eng)":5.34,"Tatoeba (epo-eng)":26.2,"Tatoeba (est-eng)":4.76,"Tatoeba (eus-eng)":53.38,"Tatoeba (fao-eng)":12.61,"Tatoeba (fin-eng)":3.41,"Tatoeba (fra-eng)":91.44,"Tatoeba (fry-eng)":24.62,"Tatoeba (gla-eng)":2.09,"Tatoeba (gle-eng)":3.26,"Tatoeba (glg-eng)":79.86,"Tatoeba (gsw-eng)":21.03,"Tatoeba (heb-eng)":1.69,"Tatoeba (hin-eng)":85.23,"Tatoeba (hrv-eng)":12.79,"Tatoeba (hsb-eng)":9.68,"Tatoeba (hun-eng)":5.07,"Tatoeba (hye-eng)":0.5,"Tatoeba (ido-eng)":43.91,"Tatoeba (ile-eng)":59.59,"Tatoeba (ina-eng)":73.67,"Tatoeba (ind-eng)":88.04,"Tatoeba (isl-eng)":6.29,"Tatoeba (ita-eng)":65.04,"Tatoeba (jav-eng)":15.02,"Tatoeba (jpn-eng)":71.36,"Tatoeba (kab-eng)":1.69,"Tatoeba (kat-eng)":0.42,"Tatoeba (kaz-eng)":3.32,"Tatoeba (khm-eng)":0.37,"Tatoeba (kor-eng)":22.39,"Tatoeba (kur-eng)":8.26,"Tatoeba (kzj-eng)":5.17,"Tatoeba (lat-eng)":28.76,"Tatoeba (lfn-eng)":44.85,"Tatoeba (lit-eng)":4.49,"Tatoeba (lvs-eng)":6.55,"Tatoeba (mal-eng)":83.3,"Tatoeba (mar-eng)":45.53,"Tatoeba (max-eng)":36.14,"Tatoeba (mhr-eng)":1.56,"Tatoeba (mkd-eng)":10.47,"Tatoeba (mon-eng)":2.85,"Tatoeba (nds-eng)":23.92,"Tatoeba (nld-eng)":29.74,"Tatoeba (nno-eng)":16.28,"Tatoeba (nob-eng)":21.07,"Tatoeba (nov-eng)":52.23,"Tatoeba (oci-eng)":40.17,"Tatoeba (orv-eng)":5.79,"Tatoeba (pam-eng)":5.85,"Tatoeba (pes-eng)":12.13,"Tatoeba (pms-eng)":31.94,"Tatoeba (pol-eng)":14.09,"Tatoeba (por-eng)":92.62,"Tatoeba (ron-eng)":27.23,"Tatoeba (rus-eng)":59.84,"Tatoeba (slk-eng)":9.98,"Tatoeba (slv-eng)":10.14,"Tatoeba (spa-eng)":94.48,"Tatoeba (sqi-eng)":10.38,"Tatoeba (srp-eng)":11.69,"Tatoeba (swe-eng)":19.53,"Tatoeba (swg-eng)":16.89,"Tatoeba (swh-eng)":16.74,"Tatoeba (tam-eng)":72.76,"Tatoeba (tat-eng)":3.59,"Tatoeba (tel-eng)":64.62,"Tatoeba (tgl-eng)":10.7,"Tatoeba (tha-eng)":2.22,"Tatoeba (tuk-eng)":5.48,"Tatoeba (tur-eng)":6.15,"Tatoeba (tzl-eng)":27.82,"Tatoeba (uig-eng)":1.27,"Tatoeba (ukr-eng)":22.06,"Tatoeba (urd-eng)":70.1,"Tatoeba (uzb-eng)":4.71,"Tatoeba (vie-eng)":94.2,"Tatoeba (war-eng)":10.38,"Tatoeba (wuu-eng)":79.58,"Tatoeba (xho-eng)":5.51,"Tatoeba (yid-eng)":0.16,"Tatoeba (yue-eng)":77.03,"Tatoeba (zsm-eng)":79.95} -{"level_0":14,"index":14,"Rank":15,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.85,"BUCC (de-en)":42.03,"BUCC (fr-en)":97.71,"BUCC (ru-en)":24.11,"BUCC (zh-en)":98.24,"Tatoeba (afr-eng)":14.54,"Tatoeba (amh-eng)":0.12,"Tatoeba (ang-eng)":19.43,"Tatoeba (ara-eng)":86.43,"Tatoeba (arq-eng)":26.28,"Tatoeba (arz-eng)":66.01,"Tatoeba (ast-eng)":68.16,"Tatoeba (awa-eng)":45.66,"Tatoeba (aze-eng)":6.63,"Tatoeba (bel-eng)":9.62,"Tatoeba (ben-eng)":78.76,"Tatoeba (ber-eng)":4.58,"Tatoeba (bos-eng)":11.93,"Tatoeba (bre-eng)":5.35,"Tatoeba (bul-eng)":14.25,"Tatoeba (cat-eng)":84.54,"Tatoeba (cbk-eng)":61.04,"Tatoeba (ceb-eng)":7.75,"Tatoeba (ces-eng)":5.64,"Tatoeba (cha-eng)":22.9,"Tatoeba (cmn-eng)":93.97,"Tatoeba (cor-eng)":3.13,"Tatoeba (csb-eng)":9.04,"Tatoeba (cym-eng)":7.77,"Tatoeba (dan-eng)":17.55,"Tatoeba (deu-eng)":53.27,"Tatoeba (dsb-eng)":9.57,"Tatoeba (dtp-eng)":3.82,"Tatoeba (ell-eng)":2.34,"Tatoeba (epo-eng)":23.11,"Tatoeba (est-eng)":4.47,"Tatoeba (eus-eng)":59.64,"Tatoeba (fao-eng)":9.42,"Tatoeba (fin-eng)":4.27,"Tatoeba (fra-eng)":92.77,"Tatoeba (fry-eng)":25.34,"Tatoeba (gla-eng)":2.99,"Tatoeba (gle-eng)":3.96,"Tatoeba (glg-eng)":75.92,"Tatoeba (gsw-eng)":21.86,"Tatoeba (heb-eng)":0.69,"Tatoeba 
(hin-eng)":91.53,"Tatoeba (hrv-eng)":9.73,"Tatoeba (hsb-eng)":9.78,"Tatoeba (hun-eng)":4.3,"Tatoeba (hye-eng)":0.65,"Tatoeba (ido-eng)":39.15,"Tatoeba (ile-eng)":56.08,"Tatoeba (ina-eng)":74.59,"Tatoeba (ind-eng)":88.3,"Tatoeba (isl-eng)":5.46,"Tatoeba (ita-eng)":55.97,"Tatoeba (jav-eng)":18.6,"Tatoeba (jpn-eng)":65.97,"Tatoeba (kab-eng)":1.14,"Tatoeba (kat-eng)":0.91,"Tatoeba (kaz-eng)":4.7,"Tatoeba (khm-eng)":0.01,"Tatoeba (kor-eng)":12.56,"Tatoeba (kur-eng)":10.65,"Tatoeba (kzj-eng)":4.78,"Tatoeba (lat-eng)":27.65,"Tatoeba (lfn-eng)":41.94,"Tatoeba (lit-eng)":4.83,"Tatoeba (lvs-eng)":5.53,"Tatoeba (mal-eng)":85.56,"Tatoeba (mar-eng)":52.67,"Tatoeba (max-eng)":39.38,"Tatoeba (mhr-eng)":2.38,"Tatoeba (mkd-eng)":6.19,"Tatoeba (mon-eng)":4.79,"Tatoeba (nds-eng)":17.76,"Tatoeba (nld-eng)":25.15,"Tatoeba (nno-eng)":13.33,"Tatoeba (nob-eng)":17.67,"Tatoeba (nov-eng)":49.14,"Tatoeba (oci-eng)":39.02,"Tatoeba (orv-eng)":3.07,"Tatoeba (pam-eng)":5.68,"Tatoeba (pes-eng)":13.51,"Tatoeba (pms-eng)":27.6,"Tatoeba (pol-eng)":9.61,"Tatoeba (por-eng)":92.52,"Tatoeba (ron-eng)":23.2,"Tatoeba (rus-eng)":40.27,"Tatoeba (slk-eng)":8.52,"Tatoeba (slv-eng)":8.1,"Tatoeba (spa-eng)":96.05,"Tatoeba (sqi-eng)":12.07,"Tatoeba (srp-eng)":8.76,"Tatoeba (swe-eng)":14.94,"Tatoeba (swg-eng)":21.63,"Tatoeba (swh-eng)":16.31,"Tatoeba (tam-eng)":77.3,"Tatoeba (tat-eng)":2.86,"Tatoeba (tel-eng)":69.05,"Tatoeba (tgl-eng)":8.63,"Tatoeba (tha-eng)":1.64,"Tatoeba (tuk-eng)":5.14,"Tatoeba (tur-eng)":5.09,"Tatoeba (tzl-eng)":31.31,"Tatoeba (uig-eng)":1.68,"Tatoeba (ukr-eng)":14.32,"Tatoeba (urd-eng)":68.96,"Tatoeba (uzb-eng)":5.15,"Tatoeba (vie-eng)":94.57,"Tatoeba (war-eng)":8.07,"Tatoeba (wuu-eng)":81.18,"Tatoeba (xho-eng)":8.3,"Tatoeba (yid-eng)":0.12,"Tatoeba (yue-eng)":81.35,"Tatoeba (zsm-eng)":81.48} -{"level_0":15,"index":16,"Rank":16,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.03,"BUCC (de-en)":11.3,"BUCC (fr-en)":96.18,"BUCC (ru-en)":1.59,"BUCC (zh-en)":96.16,"Tatoeba (afr-eng)":9.01,"Tatoeba (amh-eng)":0.21,"Tatoeba (ang-eng)":13.31,"Tatoeba (ara-eng)":80.09,"Tatoeba (arq-eng)":16.16,"Tatoeba (arz-eng)":49.42,"Tatoeba (ast-eng)":59.96,"Tatoeba (awa-eng)":30.54,"Tatoeba (aze-eng)":3.44,"Tatoeba (bel-eng)":3.28,"Tatoeba (ben-eng)":70.14,"Tatoeba (ber-eng)":3.92,"Tatoeba (bos-eng)":9.17,"Tatoeba (bre-eng)":3.85,"Tatoeba (bul-eng)":4.58,"Tatoeba (cat-eng)":81.55,"Tatoeba (cbk-eng)":55.71,"Tatoeba (ceb-eng)":5.64,"Tatoeba (ces-eng)":3.76,"Tatoeba (cha-eng)":16.46,"Tatoeba (cmn-eng)":92.95,"Tatoeba (cor-eng)":3.13,"Tatoeba (csb-eng)":5.26,"Tatoeba (cym-eng)":5.87,"Tatoeba (dan-eng)":11.4,"Tatoeba (deu-eng)":29.34,"Tatoeba (dsb-eng)":5.83,"Tatoeba (dtp-eng)":3.37,"Tatoeba (ell-eng)":0.89,"Tatoeba (epo-eng)":13.42,"Tatoeba (est-eng)":3.08,"Tatoeba (eus-eng)":42.35,"Tatoeba (fao-eng)":8.3,"Tatoeba (fin-eng)":3.5,"Tatoeba (fra-eng)":91.65,"Tatoeba (fry-eng)":18.1,"Tatoeba (gla-eng)":2.73,"Tatoeba (gle-eng)":3.46,"Tatoeba (glg-eng)":69.26,"Tatoeba (gsw-eng)":17.5,"Tatoeba (heb-eng)":0.25,"Tatoeba (hin-eng)":85.35,"Tatoeba (hrv-eng)":6.27,"Tatoeba (hsb-eng)":6.8,"Tatoeba (hun-eng)":3.25,"Tatoeba (hye-eng)":0.23,"Tatoeba (ido-eng)":26.21,"Tatoeba (ile-eng)":44.24,"Tatoeba (ina-eng)":63.9,"Tatoeba (ind-eng)":85.46,"Tatoeba (isl-eng)":3.27,"Tatoeba (ita-eng)":37.29,"Tatoeba (jav-eng)":14.35,"Tatoeba (jpn-eng)":48.83,"Tatoeba (kab-eng)":0.89,"Tatoeba (kat-eng)":0.4,"Tatoeba (kaz-eng)":0.74,"Tatoeba (khm-eng)":0.15,"Tatoeba (kor-eng)":2.65,"Tatoeba 
(kur-eng)":6.48,"Tatoeba (kzj-eng)":3.39,"Tatoeba (lat-eng)":19.55,"Tatoeba (lfn-eng)":37.52,"Tatoeba (lit-eng)":3.17,"Tatoeba (lvs-eng)":3.53,"Tatoeba (mal-eng)":73.36,"Tatoeba (mar-eng)":47.23,"Tatoeba (max-eng)":33.46,"Tatoeba (mhr-eng)":0.44,"Tatoeba (mkd-eng)":2.77,"Tatoeba (mon-eng)":0.85,"Tatoeba (nds-eng)":11.83,"Tatoeba (nld-eng)":16.33,"Tatoeba (nno-eng)":8.05,"Tatoeba (nob-eng)":11.91,"Tatoeba (nov-eng)":38.78,"Tatoeba (oci-eng)":29.39,"Tatoeba (orv-eng)":0.54,"Tatoeba (pam-eng)":3.78,"Tatoeba (pes-eng)":5.87,"Tatoeba (pms-eng)":17.49,"Tatoeba (pol-eng)":5.63,"Tatoeba (por-eng)":92.08,"Tatoeba (ron-eng)":14.95,"Tatoeba (rus-eng)":25.22,"Tatoeba (slk-eng)":5.49,"Tatoeba (slv-eng)":5.41,"Tatoeba (spa-eng)":95.48,"Tatoeba (sqi-eng)":8.49,"Tatoeba (srp-eng)":4.55,"Tatoeba (swe-eng)":10.72,"Tatoeba (swg-eng)":16.44,"Tatoeba (swh-eng)":14.52,"Tatoeba (tam-eng)":62.26,"Tatoeba (tat-eng)":0.97,"Tatoeba (tel-eng)":35.84,"Tatoeba (tgl-eng)":6.52,"Tatoeba (tha-eng)":0.51,"Tatoeba (tuk-eng)":2.81,"Tatoeba (tur-eng)":3.49,"Tatoeba (tzl-eng)":14.52,"Tatoeba (uig-eng)":0.53,"Tatoeba (ukr-eng)":4.93,"Tatoeba (urd-eng)":61.62,"Tatoeba (uzb-eng)":2.54,"Tatoeba (vie-eng)":92.86,"Tatoeba (war-eng)":7.19,"Tatoeba (wuu-eng)":74.62,"Tatoeba (xho-eng)":4.83,"Tatoeba (yid-eng)":0.19,"Tatoeba (yue-eng)":74.35,"Tatoeba (zsm-eng)":74.89} -{"level_0":16,"index":26,"Rank":17,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":21.98,"BUCC (de-en)":95.04,"BUCC (fr-en)":94.96,"BUCC (ru-en)":8.33,"BUCC (zh-en)":1.3,"Tatoeba (afr-eng)":41.84,"Tatoeba (amh-eng)":0.03,"Tatoeba (ang-eng)":37.87,"Tatoeba (ara-eng)":0.61,"Tatoeba (arq-eng)":0.74,"Tatoeba (arz-eng)":0.42,"Tatoeba (ast-eng)":65.41,"Tatoeba (awa-eng)":1.46,"Tatoeba (aze-eng)":8.79,"Tatoeba (bel-eng)":5.76,"Tatoeba (ben-eng)":0.01,"Tatoeba (ber-eng)":5.92,"Tatoeba (bos-eng)":16.12,"Tatoeba (bre-eng)":6.12,"Tatoeba (bul-eng)":9.06,"Tatoeba (cat-eng)":57.4,"Tatoeba (cbk-eng)":57.68,"Tatoeba (ceb-eng)":12.56,"Tatoeba (ces-eng)":9.47,"Tatoeba (cha-eng)":27.13,"Tatoeba (cmn-eng)":1.82,"Tatoeba (cor-eng)":3.87,"Tatoeba (csb-eng)":14.41,"Tatoeba (cym-eng)":6.69,"Tatoeba (dan-eng)":54.87,"Tatoeba (deu-eng)":93.72,"Tatoeba (dsb-eng)":14.74,"Tatoeba (dtp-eng)":5.84,"Tatoeba (ell-eng)":0.6,"Tatoeba (epo-eng)":30.8,"Tatoeba (est-eng)":5.39,"Tatoeba (eus-eng)":11.9,"Tatoeba (fao-eng)":28.08,"Tatoeba (fin-eng)":6.81,"Tatoeba (fra-eng)":85.29,"Tatoeba (fry-eng)":38.68,"Tatoeba (gla-eng)":2.96,"Tatoeba (gle-eng)":3.74,"Tatoeba (glg-eng)":70.0,"Tatoeba (gsw-eng)":30.49,"Tatoeba (heb-eng)":0.87,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":17.43,"Tatoeba (hsb-eng)":14.69,"Tatoeba (hun-eng)":7.28,"Tatoeba (hye-eng)":0.77,"Tatoeba (ido-eng)":46.65,"Tatoeba (ile-eng)":59.43,"Tatoeba (ina-eng)":82.71,"Tatoeba (ind-eng)":37.26,"Tatoeba (isl-eng)":11.21,"Tatoeba (ita-eng)":79.77,"Tatoeba (jav-eng)":7.81,"Tatoeba (jpn-eng)":0.91,"Tatoeba (kab-eng)":2.23,"Tatoeba (kat-eng)":1.48,"Tatoeba (kaz-eng)":1.77,"Tatoeba (khm-eng)":0.38,"Tatoeba (kor-eng)":1.96,"Tatoeba (kur-eng)":12.11,"Tatoeba (kzj-eng)":6.13,"Tatoeba (lat-eng)":27.84,"Tatoeba (lfn-eng)":45.89,"Tatoeba (lit-eng)":5.94,"Tatoeba (lvs-eng)":8.11,"Tatoeba (mal-eng)":0.59,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":21.7,"Tatoeba (mhr-eng)":0.68,"Tatoeba (mkd-eng)":5.92,"Tatoeba (mon-eng)":2.39,"Tatoeba (nds-eng)":45.04,"Tatoeba (nld-eng)":64.75,"Tatoeba (nno-eng)":36.74,"Tatoeba (nob-eng)":54.77,"Tatoeba (nov-eng)":57.12,"Tatoeba (oci-eng)":34.39,"Tatoeba 
(orv-eng)":2.04,"Tatoeba (pam-eng)":8.34,"Tatoeba (pes-eng)":0.87,"Tatoeba (pms-eng)":38.06,"Tatoeba (pol-eng)":28.35,"Tatoeba (por-eng)":83.61,"Tatoeba (ron-eng)":65.27,"Tatoeba (rus-eng)":30.42,"Tatoeba (slk-eng)":13.19,"Tatoeba (slv-eng)":13.49,"Tatoeba (spa-eng)":89.18,"Tatoeba (sqi-eng)":14.66,"Tatoeba (srp-eng)":13.24,"Tatoeba (swe-eng)":60.67,"Tatoeba (swg-eng)":34.76,"Tatoeba (swh-eng)":8.07,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.46,"Tatoeba (tel-eng)":0.67,"Tatoeba (tgl-eng)":25.22,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.99,"Tatoeba (tur-eng)":7.72,"Tatoeba (tzl-eng)":38.49,"Tatoeba (uig-eng)":0.87,"Tatoeba (ukr-eng)":9.12,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":5.48,"Tatoeba (vie-eng)":8.45,"Tatoeba (war-eng)":13.75,"Tatoeba (wuu-eng)":1.44,"Tatoeba (xho-eng)":9.15,"Tatoeba (yid-eng)":0.28,"Tatoeba (yue-eng)":0.98,"Tatoeba (zsm-eng)":35.71} -{"level_0":17,"index":22,"Rank":18,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":19.48,"BUCC (de-en)":90.99,"BUCC (fr-en)":88.55,"BUCC (ru-en)":2.07,"BUCC (zh-en)":1.49,"Tatoeba (afr-eng)":33.47,"Tatoeba (amh-eng)":0.01,"Tatoeba (ang-eng)":30.74,"Tatoeba (ara-eng)":0.47,"Tatoeba (arq-eng)":0.34,"Tatoeba (arz-eng)":0.14,"Tatoeba (ast-eng)":51.74,"Tatoeba (awa-eng)":0.49,"Tatoeba (aze-eng)":7.43,"Tatoeba (bel-eng)":3.45,"Tatoeba (ben-eng)":0.06,"Tatoeba (ber-eng)":5.79,"Tatoeba (bos-eng)":17.43,"Tatoeba (bre-eng)":5.69,"Tatoeba (bul-eng)":7.55,"Tatoeba (cat-eng)":48.06,"Tatoeba (cbk-eng)":54.56,"Tatoeba (ceb-eng)":8.72,"Tatoeba (ces-eng)":8.76,"Tatoeba (cha-eng)":27.56,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":3.69,"Tatoeba (csb-eng)":13.18,"Tatoeba (cym-eng)":6.97,"Tatoeba (dan-eng)":47.36,"Tatoeba (deu-eng)":91.54,"Tatoeba (dsb-eng)":13.2,"Tatoeba (dtp-eng)":4.54,"Tatoeba (ell-eng)":0.55,"Tatoeba (epo-eng)":27.86,"Tatoeba (est-eng)":5.13,"Tatoeba (eus-eng)":10.23,"Tatoeba (fao-eng)":21.44,"Tatoeba (fin-eng)":6.62,"Tatoeba (fra-eng)":79.66,"Tatoeba (fry-eng)":32.92,"Tatoeba (gla-eng)":2.87,"Tatoeba (gle-eng)":3.26,"Tatoeba (glg-eng)":63.81,"Tatoeba (gsw-eng)":29.71,"Tatoeba (heb-eng)":0.33,"Tatoeba (hin-eng)":0.25,"Tatoeba (hrv-eng)":17.16,"Tatoeba (hsb-eng)":12.02,"Tatoeba (hun-eng)":7.21,"Tatoeba (hye-eng)":0.78,"Tatoeba (ido-eng)":40.83,"Tatoeba (ile-eng)":54.95,"Tatoeba (ina-eng)":72.28,"Tatoeba (ind-eng)":30.95,"Tatoeba (isl-eng)":11.29,"Tatoeba (ita-eng)":73.83,"Tatoeba (jav-eng)":8.66,"Tatoeba (jpn-eng)":0.61,"Tatoeba (kab-eng)":1.78,"Tatoeba (kat-eng)":0.79,"Tatoeba (kaz-eng)":0.95,"Tatoeba (khm-eng)":0.49,"Tatoeba (kor-eng)":1.87,"Tatoeba (kur-eng)":10.91,"Tatoeba (kzj-eng)":5.72,"Tatoeba (lat-eng)":18.24,"Tatoeba (lfn-eng)":43.49,"Tatoeba (lit-eng)":7.13,"Tatoeba (lvs-eng)":7.04,"Tatoeba (mal-eng)":0.44,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":18.99,"Tatoeba (mhr-eng)":1.11,"Tatoeba (mkd-eng)":2.49,"Tatoeba (mon-eng)":2.01,"Tatoeba (nds-eng)":39.96,"Tatoeba (nld-eng)":58.86,"Tatoeba (nno-eng)":29.07,"Tatoeba (nob-eng)":40.25,"Tatoeba (nov-eng)":50.19,"Tatoeba (oci-eng)":30.72,"Tatoeba (orv-eng)":0.85,"Tatoeba (pam-eng)":7.21,"Tatoeba (pes-eng)":0.53,"Tatoeba (pms-eng)":31.07,"Tatoeba (pol-eng)":18.06,"Tatoeba (por-eng)":81.92,"Tatoeba (ron-eng)":62.6,"Tatoeba (rus-eng)":22.24,"Tatoeba (slk-eng)":10.59,"Tatoeba (slv-eng)":11.4,"Tatoeba (spa-eng)":85.78,"Tatoeba (sqi-eng)":14.92,"Tatoeba (srp-eng)":9.87,"Tatoeba (swe-eng)":55.08,"Tatoeba (swg-eng)":32.66,"Tatoeba (swh-eng)":7.64,"Tatoeba (tam-eng)":0.49,"Tatoeba (tat-eng)":1.28,"Tatoeba 
(tel-eng)":0.45,"Tatoeba (tgl-eng)":23.63,"Tatoeba (tha-eng)":0.61,"Tatoeba (tuk-eng)":5.71,"Tatoeba (tur-eng)":8.25,"Tatoeba (tzl-eng)":28.4,"Tatoeba (uig-eng)":0.57,"Tatoeba (ukr-eng)":5.69,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.19,"Tatoeba (vie-eng)":9.07,"Tatoeba (war-eng)":12.31,"Tatoeba (wuu-eng)":1.38,"Tatoeba (xho-eng)":7.6,"Tatoeba (yid-eng)":0.41,"Tatoeba (yue-eng)":1.31,"Tatoeba (zsm-eng)":29.74} -{"level_0":18,"index":1,"Rank":19,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.78,"BUCC (de-en)":9.26,"BUCC (fr-en)":17.41,"BUCC (ru-en)":51.97,"BUCC (zh-en)":88.7,"Tatoeba (afr-eng)":13.22,"Tatoeba (amh-eng)":5.75,"Tatoeba (ang-eng)":13.26,"Tatoeba (ara-eng)":19.56,"Tatoeba (arq-eng)":3.99,"Tatoeba (arz-eng)":11.42,"Tatoeba (ast-eng)":17.74,"Tatoeba (awa-eng)":16.75,"Tatoeba (aze-eng)":8.55,"Tatoeba (bel-eng)":24.34,"Tatoeba (ben-eng)":23.45,"Tatoeba (ber-eng)":2.49,"Tatoeba (bos-eng)":15.81,"Tatoeba (bre-eng)":3.22,"Tatoeba (bul-eng)":42.18,"Tatoeba (cat-eng)":15.96,"Tatoeba (cbk-eng)":10.95,"Tatoeba (ceb-eng)":4.8,"Tatoeba (ces-eng)":11.89,"Tatoeba (cha-eng)":9.11,"Tatoeba (cmn-eng)":86.26,"Tatoeba (cor-eng)":1.94,"Tatoeba (csb-eng)":7.17,"Tatoeba (cym-eng)":5.64,"Tatoeba (dan-eng)":26.0,"Tatoeba (deu-eng)":20.2,"Tatoeba (dsb-eng)":4.49,"Tatoeba (dtp-eng)":1.76,"Tatoeba (ell-eng)":9.5,"Tatoeba (epo-eng)":8.25,"Tatoeba (est-eng)":5.45,"Tatoeba (eus-eng)":7.38,"Tatoeba (fao-eng)":6.34,"Tatoeba (fin-eng)":13.53,"Tatoeba (fra-eng)":19.96,"Tatoeba (fry-eng)":19.03,"Tatoeba (gla-eng)":2.68,"Tatoeba (gle-eng)":3.65,"Tatoeba (glg-eng)":19.32,"Tatoeba (gsw-eng)":11.69,"Tatoeba (heb-eng)":30.26,"Tatoeba (hin-eng)":49.45,"Tatoeba (hrv-eng)":10.05,"Tatoeba (hsb-eng)":6.44,"Tatoeba (hun-eng)":14.77,"Tatoeba (hye-eng)":23.13,"Tatoeba (ido-eng)":11.78,"Tatoeba (ile-eng)":17.24,"Tatoeba (ina-eng)":23.22,"Tatoeba (ind-eng)":32.13,"Tatoeba (isl-eng)":7.49,"Tatoeba (ita-eng)":15.98,"Tatoeba (jav-eng)":6.74,"Tatoeba (jpn-eng)":52.34,"Tatoeba (kab-eng)":0.6,"Tatoeba (kat-eng)":33.0,"Tatoeba (kaz-eng)":16.97,"Tatoeba (khm-eng)":15.33,"Tatoeba (kor-eng)":48.19,"Tatoeba (kur-eng)":6.11,"Tatoeba (kzj-eng)":2.34,"Tatoeba (lat-eng)":7.87,"Tatoeba (lfn-eng)":8.76,"Tatoeba (lit-eng)":8.13,"Tatoeba (lvs-eng)":7.4,"Tatoeba (mal-eng)":36.61,"Tatoeba (mar-eng)":28.56,"Tatoeba (max-eng)":13.73,"Tatoeba (mhr-eng)":3.52,"Tatoeba (mkd-eng)":16.69,"Tatoeba (mon-eng)":24.85,"Tatoeba (nds-eng)":14.99,"Tatoeba (nld-eng)":29.41,"Tatoeba (nno-eng)":13.8,"Tatoeba (nob-eng)":27.89,"Tatoeba (nov-eng)":25.22,"Tatoeba (oci-eng)":6.69,"Tatoeba (orv-eng)":3.11,"Tatoeba (pam-eng)":3.09,"Tatoeba (pes-eng)":51.06,"Tatoeba (pms-eng)":9.27,"Tatoeba (pol-eng)":14.51,"Tatoeba (por-eng)":28.6,"Tatoeba (ron-eng)":17.36,"Tatoeba (rus-eng)":60.68,"Tatoeba (slk-eng)":14.05,"Tatoeba (slv-eng)":10.17,"Tatoeba (spa-eng)":28.19,"Tatoeba (sqi-eng)":15.83,"Tatoeba (srp-eng)":12.92,"Tatoeba (swe-eng)":21.75,"Tatoeba (swg-eng)":10.95,"Tatoeba (swh-eng)":6.37,"Tatoeba (tam-eng)":26.72,"Tatoeba (tat-eng)":2.51,"Tatoeba (tel-eng)":23.69,"Tatoeba (tgl-eng)":5.76,"Tatoeba (tha-eng)":60.21,"Tatoeba (tuk-eng)":4.02,"Tatoeba (tur-eng)":9.38,"Tatoeba (tzl-eng)":20.61,"Tatoeba (uig-eng)":2.65,"Tatoeba (ukr-eng)":39.8,"Tatoeba (urd-eng)":24.79,"Tatoeba (uzb-eng)":3.53,"Tatoeba (vie-eng)":49.56,"Tatoeba (war-eng)":4.84,"Tatoeba (wuu-eng)":73.25,"Tatoeba (xho-eng)":8.03,"Tatoeba (yid-eng)":2.26,"Tatoeba (yue-eng)":58.35,"Tatoeba (zsm-eng)":30.9} 
-{"level_0":19,"index":25,"Rank":20,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":17.26,"BUCC (de-en)":87.0,"BUCC (fr-en)":88.91,"BUCC (ru-en)":0.44,"BUCC (zh-en)":0.95,"Tatoeba (afr-eng)":23.7,"Tatoeba (amh-eng)":0.65,"Tatoeba (ang-eng)":30.98,"Tatoeba (ara-eng)":0.48,"Tatoeba (arq-eng)":0.68,"Tatoeba (arz-eng)":0.22,"Tatoeba (ast-eng)":55.3,"Tatoeba (awa-eng)":1.03,"Tatoeba (aze-eng)":5.83,"Tatoeba (bel-eng)":1.66,"Tatoeba (ben-eng)":0.0,"Tatoeba (ber-eng)":5.62,"Tatoeba (bos-eng)":12.23,"Tatoeba (bre-eng)":5.84,"Tatoeba (bul-eng)":1.35,"Tatoeba (cat-eng)":48.56,"Tatoeba (cbk-eng)":46.97,"Tatoeba (ceb-eng)":9.79,"Tatoeba (ces-eng)":6.0,"Tatoeba (cha-eng)":24.21,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":4.03,"Tatoeba (csb-eng)":9.53,"Tatoeba (cym-eng)":9.17,"Tatoeba (dan-eng)":34.63,"Tatoeba (deu-eng)":89.31,"Tatoeba (dsb-eng)":9.68,"Tatoeba (dtp-eng)":4.66,"Tatoeba (ell-eng)":0.77,"Tatoeba (epo-eng)":26.88,"Tatoeba (est-eng)":5.19,"Tatoeba (eus-eng)":9.46,"Tatoeba (fao-eng)":21.59,"Tatoeba (fin-eng)":5.66,"Tatoeba (fra-eng)":79.71,"Tatoeba (fry-eng)":28.29,"Tatoeba (gla-eng)":2.34,"Tatoeba (gle-eng)":3.55,"Tatoeba (glg-eng)":56.25,"Tatoeba (gsw-eng)":24.25,"Tatoeba (heb-eng)":0.57,"Tatoeba (hin-eng)":0.12,"Tatoeba (hrv-eng)":10.29,"Tatoeba (hsb-eng)":9.52,"Tatoeba (hun-eng)":6.22,"Tatoeba (hye-eng)":0.81,"Tatoeba (ido-eng)":41.11,"Tatoeba (ile-eng)":54.0,"Tatoeba (ina-eng)":75.47,"Tatoeba (ind-eng)":13.02,"Tatoeba (isl-eng)":8.98,"Tatoeba (ita-eng)":67.23,"Tatoeba (jav-eng)":8.54,"Tatoeba (jpn-eng)":0.99,"Tatoeba (kab-eng)":1.85,"Tatoeba (kat-eng)":1.37,"Tatoeba (kaz-eng)":0.67,"Tatoeba (khm-eng)":0.56,"Tatoeba (kor-eng)":1.73,"Tatoeba (kur-eng)":9.23,"Tatoeba (kzj-eng)":5.38,"Tatoeba (lat-eng)":21.3,"Tatoeba (lfn-eng)":40.48,"Tatoeba (lit-eng)":5.38,"Tatoeba (lvs-eng)":6.83,"Tatoeba (mal-eng)":0.45,"Tatoeba (mar-eng)":0.01,"Tatoeba (max-eng)":16.44,"Tatoeba (mhr-eng)":0.33,"Tatoeba (mkd-eng)":0.4,"Tatoeba (mon-eng)":2.48,"Tatoeba (nds-eng)":34.66,"Tatoeba (nld-eng)":42.72,"Tatoeba (nno-eng)":24.08,"Tatoeba (nob-eng)":34.17,"Tatoeba (nov-eng)":55.01,"Tatoeba (oci-eng)":29.15,"Tatoeba (orv-eng)":0.2,"Tatoeba (pam-eng)":6.99,"Tatoeba (pes-eng)":0.9,"Tatoeba (pms-eng)":30.8,"Tatoeba (pol-eng)":12.81,"Tatoeba (por-eng)":73.45,"Tatoeba (ron-eng)":54.86,"Tatoeba (rus-eng)":2.43,"Tatoeba (slk-eng)":8.35,"Tatoeba (slv-eng)":9.3,"Tatoeba (spa-eng)":78.87,"Tatoeba (sqi-eng)":11.74,"Tatoeba (srp-eng)":5.83,"Tatoeba (swe-eng)":35.41,"Tatoeba (swg-eng)":28.18,"Tatoeba (swh-eng)":7.53,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.01,"Tatoeba (tel-eng)":1.1,"Tatoeba (tgl-eng)":12.4,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.95,"Tatoeba (tur-eng)":6.45,"Tatoeba (tzl-eng)":37.82,"Tatoeba (uig-eng)":0.67,"Tatoeba (ukr-eng)":1.88,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.79,"Tatoeba (vie-eng)":7.03,"Tatoeba (war-eng)":9.68,"Tatoeba (wuu-eng)":1.28,"Tatoeba (xho-eng)":10.64,"Tatoeba (yid-eng)":0.57,"Tatoeba (yue-eng)":0.88,"Tatoeba (zsm-eng)":14.67} -{"level_0":20,"index":6,"Rank":21,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":6.61,"BUCC (de-en)":14.99,"BUCC (fr-en)":32.42,"BUCC (ru-en)":0.18,"BUCC (zh-en)":2.76,"Tatoeba (afr-eng)":8.05,"Tatoeba (amh-eng)":0.67,"Tatoeba (ang-eng)":17.0,"Tatoeba (ara-eng)":0.3,"Tatoeba (arq-eng)":0.26,"Tatoeba (arz-eng)":0.0,"Tatoeba (ast-eng)":17.93,"Tatoeba (awa-eng)":0.29,"Tatoeba (aze-eng)":3.26,"Tatoeba (bel-eng)":0.96,"Tatoeba 
(ben-eng)":0.07,"Tatoeba (ber-eng)":5.21,"Tatoeba (bos-eng)":7.18,"Tatoeba (bre-eng)":3.81,"Tatoeba (bul-eng)":0.99,"Tatoeba (cat-eng)":15.52,"Tatoeba (cbk-eng)":14.09,"Tatoeba (ceb-eng)":4.59,"Tatoeba (ces-eng)":4.73,"Tatoeba (cha-eng)":14.16,"Tatoeba (cmn-eng)":2.89,"Tatoeba (cor-eng)":3.05,"Tatoeba (csb-eng)":6.64,"Tatoeba (cym-eng)":6.45,"Tatoeba (dan-eng)":10.06,"Tatoeba (deu-eng)":18.14,"Tatoeba (dsb-eng)":3.81,"Tatoeba (dtp-eng)":2.73,"Tatoeba (ell-eng)":0.5,"Tatoeba (epo-eng)":10.98,"Tatoeba (est-eng)":3.5,"Tatoeba (eus-eng)":7.35,"Tatoeba (fao-eng)":8.13,"Tatoeba (fin-eng)":3.62,"Tatoeba (fra-eng)":21.53,"Tatoeba (fry-eng)":14.62,"Tatoeba (gla-eng)":2.82,"Tatoeba (gle-eng)":2.38,"Tatoeba (glg-eng)":18.49,"Tatoeba (gsw-eng)":13.55,"Tatoeba (heb-eng)":0.1,"Tatoeba (hin-eng)":0.0,"Tatoeba (hrv-eng)":5.52,"Tatoeba (hsb-eng)":4.08,"Tatoeba (hun-eng)":4.68,"Tatoeba (hye-eng)":0.4,"Tatoeba (ido-eng)":17.46,"Tatoeba (ile-eng)":20.98,"Tatoeba (ina-eng)":31.39,"Tatoeba (ind-eng)":8.37,"Tatoeba (isl-eng)":4.15,"Tatoeba (ita-eng)":18.5,"Tatoeba (jav-eng)":5.54,"Tatoeba (jpn-eng)":1.58,"Tatoeba (kab-eng)":1.02,"Tatoeba (kat-eng)":0.28,"Tatoeba (kaz-eng)":0.58,"Tatoeba (khm-eng)":0.51,"Tatoeba (kor-eng)":0.5,"Tatoeba (kur-eng)":6.31,"Tatoeba (kzj-eng)":3.6,"Tatoeba (lat-eng)":11.38,"Tatoeba (lfn-eng)":12.55,"Tatoeba (lit-eng)":2.48,"Tatoeba (lvs-eng)":4.88,"Tatoeba (mal-eng)":0.4,"Tatoeba (mar-eng)":0.0,"Tatoeba (max-eng)":7.27,"Tatoeba (mhr-eng)":0.1,"Tatoeba (mkd-eng)":0.15,"Tatoeba (mon-eng)":1.44,"Tatoeba (nds-eng)":13.34,"Tatoeba (nld-eng)":12.37,"Tatoeba (nno-eng)":7.02,"Tatoeba (nob-eng)":10.3,"Tatoeba (nov-eng)":30.33,"Tatoeba (oci-eng)":11.98,"Tatoeba (orv-eng)":0.0,"Tatoeba (pam-eng)":4.85,"Tatoeba (pes-eng)":0.2,"Tatoeba (pms-eng)":11.4,"Tatoeba (pol-eng)":6.57,"Tatoeba (por-eng)":18.86,"Tatoeba (ron-eng)":10.94,"Tatoeba (rus-eng)":0.25,"Tatoeba (slk-eng)":5.63,"Tatoeba (slv-eng)":4.38,"Tatoeba (spa-eng)":18.97,"Tatoeba (sqi-eng)":6.3,"Tatoeba (srp-eng)":2.98,"Tatoeba (swe-eng)":8.47,"Tatoeba (swg-eng)":9.33,"Tatoeba (swh-eng)":6.9,"Tatoeba (tam-eng)":0.51,"Tatoeba (tat-eng)":0.81,"Tatoeba (tel-eng)":0.51,"Tatoeba (tgl-eng)":3.22,"Tatoeba (tha-eng)":1.0,"Tatoeba (tuk-eng)":3.48,"Tatoeba (tur-eng)":4.22,"Tatoeba (tzl-eng)":14.76,"Tatoeba (uig-eng)":0.21,"Tatoeba (ukr-eng)":0.78,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":3.19,"Tatoeba (vie-eng)":5.28,"Tatoeba (war-eng)":5.48,"Tatoeba (wuu-eng)":2.62,"Tatoeba (xho-eng)":2.17,"Tatoeba (yid-eng)":0.3,"Tatoeba (yue-eng)":2.15,"Tatoeba (zsm-eng)":7.56} -{"level_0":21,"index":21,"Rank":22,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":4.54,"BUCC (de-en)":0.18,"BUCC (fr-en)":0.08,"BUCC (ru-en)":0.15,"BUCC (zh-en)":0.05,"Tatoeba (afr-eng)":4.82,"Tatoeba (amh-eng)":1.18,"Tatoeba (ang-eng)":8.54,"Tatoeba (ara-eng)":0.63,"Tatoeba (arq-eng)":0.4,"Tatoeba (arz-eng)":0.63,"Tatoeba (ast-eng)":11.69,"Tatoeba (awa-eng)":0.0,"Tatoeba (aze-eng)":3.22,"Tatoeba (bel-eng)":1.75,"Tatoeba (ben-eng)":0.2,"Tatoeba (ber-eng)":7.0,"Tatoeba (bos-eng)":9.31,"Tatoeba (bre-eng)":4.17,"Tatoeba (bul-eng)":1.29,"Tatoeba (cat-eng)":7.73,"Tatoeba (cbk-eng)":5.61,"Tatoeba (ceb-eng)":4.88,"Tatoeba (ces-eng)":3.55,"Tatoeba (cha-eng)":19.29,"Tatoeba (cmn-eng)":0.5,"Tatoeba (cor-eng)":4.15,"Tatoeba (csb-eng)":5.69,"Tatoeba (cym-eng)":8.4,"Tatoeba (dan-eng)":6.99,"Tatoeba (deu-eng)":3.67,"Tatoeba (dsb-eng)":5.33,"Tatoeba (dtp-eng)":4.25,"Tatoeba (ell-eng)":0.63,"Tatoeba (epo-eng)":2.45,"Tatoeba (est-eng)":2.69,"Tatoeba 
(eus-eng)":4.69,"Tatoeba (fao-eng)":7.61,"Tatoeba (fin-eng)":3.36,"Tatoeba (fra-eng)":7.0,"Tatoeba (fry-eng)":12.36,"Tatoeba (gla-eng)":3.07,"Tatoeba (gle-eng)":4.81,"Tatoeba (glg-eng)":8.12,"Tatoeba (gsw-eng)":18.87,"Tatoeba (heb-eng)":0.68,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":5.41,"Tatoeba (hsb-eng)":6.32,"Tatoeba (hun-eng)":3.42,"Tatoeba (hye-eng)":0.97,"Tatoeba (ido-eng)":7.1,"Tatoeba (ile-eng)":13.61,"Tatoeba (ina-eng)":8.57,"Tatoeba (ind-eng)":7.26,"Tatoeba (isl-eng)":4.09,"Tatoeba (ita-eng)":5.54,"Tatoeba (jav-eng)":11.43,"Tatoeba (jpn-eng)":0.2,"Tatoeba (kab-eng)":2.71,"Tatoeba (kat-eng)":1.11,"Tatoeba (kaz-eng)":1.17,"Tatoeba (khm-eng)":0.55,"Tatoeba (kor-eng)":0.5,"Tatoeba (kur-eng)":8.55,"Tatoeba (kzj-eng)":4.61,"Tatoeba (lat-eng)":4.07,"Tatoeba (lfn-eng)":2.83,"Tatoeba (lit-eng)":0.95,"Tatoeba (lvs-eng)":3.25,"Tatoeba (mal-eng)":0.29,"Tatoeba (mar-eng)":0.2,"Tatoeba (max-eng)":14.53,"Tatoeba (mhr-eng)":0.2,"Tatoeba (mkd-eng)":0.2,"Tatoeba (mon-eng)":1.1,"Tatoeba (nds-eng)":10.37,"Tatoeba (nld-eng)":9.5,"Tatoeba (nno-eng)":4.49,"Tatoeba (nob-eng)":4.95,"Tatoeba (nov-eng)":14.53,"Tatoeba (oci-eng)":5.8,"Tatoeba (orv-eng)":0.24,"Tatoeba (pam-eng)":6.65,"Tatoeba (pes-eng)":0.5,"Tatoeba (pms-eng)":8.05,"Tatoeba (pol-eng)":5.13,"Tatoeba (por-eng)":5.87,"Tatoeba (ron-eng)":6.76,"Tatoeba (rus-eng)":0.2,"Tatoeba (slk-eng)":4.23,"Tatoeba (slv-eng)":6.05,"Tatoeba (spa-eng)":5.03,"Tatoeba (sqi-eng)":4.36,"Tatoeba (srp-eng)":1.77,"Tatoeba (swe-eng)":6.72,"Tatoeba (swg-eng)":8.54,"Tatoeba (swh-eng)":11.49,"Tatoeba (tam-eng)":1.3,"Tatoeba (tat-eng)":0.77,"Tatoeba (tel-eng)":0.85,"Tatoeba (tgl-eng)":2.61,"Tatoeba (tha-eng)":0.69,"Tatoeba (tuk-eng)":5.76,"Tatoeba (tur-eng)":5.24,"Tatoeba (tzl-eng)":15.51,"Tatoeba (uig-eng)":0.6,"Tatoeba (ukr-eng)":1.23,"Tatoeba (urd-eng)":0.4,"Tatoeba (uzb-eng)":4.73,"Tatoeba (vie-eng)":6.55,"Tatoeba (war-eng)":4.12,"Tatoeba (wuu-eng)":0.2,"Tatoeba (xho-eng)":4.33,"Tatoeba (yid-eng)":0.59,"Tatoeba (yue-eng)":0.5,"Tatoeba (zsm-eng)":7.27} -{"level_0":22,"index":2,"Rank":23,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":"","BUCC (fr-en)":"","BUCC (ru-en)":"","BUCC (zh-en)":9.35,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba 
(kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"level_0":23,"index":3,"Rank":24,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":1.09,"BUCC (fr-en)":0.02,"BUCC (ru-en)":0.0,"BUCC (zh-en)":0.0,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba 
(sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"level_0":24,"index":4,"Rank":25,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":75.15,"BUCC (fr-en)":0.42,"BUCC (ru-en)":0.01,"BUCC (zh-en)":0.32,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"level_0":25,"index":18,"Rank":26,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":98.87,"BUCC 
(fr-en)":"","BUCC (ru-en)":"","BUCC (zh-en)":"","Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"level_0":26,"index":20,"Rank":27,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","BUCC (de-en)":0.18,"BUCC (fr-en)":0.19,"BUCC (ru-en)":0.1,"BUCC (zh-en)":0.0,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba 
(est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} -{"level_0":27,"index":27,"Rank":28,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BUCC (de-en)":"","BUCC (fr-en)":"","BUCC (ru-en)":"","BUCC (zh-en)":98.39,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":95.15,"Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba 
(lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba (xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} +{"Rank":1,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":81.75,"BUCC (de-en)":99.35,"BUCC (fr-en)":98.72,"BUCC (ru-en)":97.78,"BUCC (zh-en)":99.16,"Tatoeba (afr-eng)":96.18,"Tatoeba (amh-eng)":91.47,"Tatoeba (ang-eng)":59.28,"Tatoeba (ara-eng)":88.8,"Tatoeba (arq-eng)":42.69,"Tatoeba (arz-eng)":76.0,"Tatoeba (ast-eng)":90.68,"Tatoeba (awa-eng)":71.7,"Tatoeba (aze-eng)":94.93,"Tatoeba (bel-eng)":95.0,"Tatoeba (ben-eng)":88.55,"Tatoeba (ber-eng)":8.4,"Tatoeba (bos-eng)":94.92,"Tatoeba (bre-eng)":15.07,"Tatoeba (bul-eng)":94.58,"Tatoeba (cat-eng)":95.38,"Tatoeba (cbk-eng)":79.44,"Tatoeba (ceb-eng)":64.42,"Tatoeba (ces-eng)":96.68,"Tatoeba (cha-eng)":31.77,"Tatoeba (cmn-eng)":95.1,"Tatoeba (cor-eng)":10.11,"Tatoeba (csb-eng)":52.57,"Tatoeba (cym-eng)":92.0,"Tatoeba (dan-eng)":95.71,"Tatoeba (deu-eng)":99.2,"Tatoeba (dsb-eng)":64.81,"Tatoeba (dtp-eng)":10.85,"Tatoeba (ell-eng)":95.35,"Tatoeba (epo-eng)":98.2,"Tatoeba (est-eng)":96.55,"Tatoeba (eus-eng)":95.01,"Tatoeba (fao-eng)":87.4,"Tatoeba (fin-eng)":96.37,"Tatoeba (fra-eng)":94.86,"Tatoeba (fry-eng)":89.31,"Tatoeba (gla-eng)":85.66,"Tatoeba (gle-eng)":93.8,"Tatoeba (glg-eng)":96.82,"Tatoeba (gsw-eng)":46.5,"Tatoeba (heb-eng)":91.53,"Tatoeba (hin-eng)":96.87,"Tatoeba (hrv-eng)":96.95,"Tatoeba (hsb-eng)":67.11,"Tatoeba (hun-eng)":96.55,"Tatoeba (hye-eng)":94.09,"Tatoeba (ido-eng)":89.42,"Tatoeba (ile-eng)":85.58,"Tatoeba (ina-eng)":95.37,"Tatoeba (ind-eng)":93.66,"Tatoeba (isl-eng)":94.75,"Tatoeba (ita-eng)":92.72,"Tatoeba (jav-eng)":79.77,"Tatoeba (jpn-eng)":95.38,"Tatoeba (kab-eng)":4.31,"Tatoeba (kat-eng)":95.02,"Tatoeba (kaz-eng)":87.49,"Tatoeba (khm-eng)":78.37,"Tatoeba (kor-eng)":90.95,"Tatoeba (kur-eng)":83.59,"Tatoeba (kzj-eng)":11.33,"Tatoeba (lat-eng)":80.07,"Tatoeba (lfn-eng)":67.54,"Tatoeba (lit-eng)":96.47,"Tatoeba (lvs-eng)":95.88,"Tatoeba (mal-eng)":98.45,"Tatoeba (mar-eng)":92.65,"Tatoeba (max-eng)":63.26,"Tatoeba (mhr-eng)":15.74,"Tatoeba (mkd-eng)":93.6,"Tatoeba (mon-eng)":95.91,"Tatoeba (nds-eng)":79.42,"Tatoeba (nld-eng)":96.07,"Tatoeba (nno-eng)":94.48,"Tatoeba (nob-eng)":98.4,"Tatoeba (nov-eng)":74.38,"Tatoeba (oci-eng)":65.81,"Tatoeba (orv-eng)":38.93,"Tatoeba (pam-eng)":10.73,"Tatoeba (pes-eng)":94.7,"Tatoeba (pms-eng)":64.57,"Tatoeba (pol-eng)":97.22,"Tatoeba (por-eng)":94.14,"Tatoeba (ron-eng)":96.92,"Tatoeba (rus-eng)":93.75,"Tatoeba (slk-eng)":96.5,"Tatoeba 
(slv-eng)":96.03,"Tatoeba (spa-eng)":98.4,"Tatoeba (sqi-eng)":96.76,"Tatoeba (srp-eng)":94.43,"Tatoeba (swe-eng)":95.63,"Tatoeba (swg-eng)":59.36,"Tatoeba (swh-eng)":84.5,"Tatoeba (tam-eng)":89.0,"Tatoeba (tat-eng)":85.92,"Tatoeba (tel-eng)":97.86,"Tatoeba (tgl-eng)":96.02,"Tatoeba (tha-eng)":96.14,"Tatoeba (tuk-eng)":75.27,"Tatoeba (tur-eng)":98.0,"Tatoeba (tzl-eng)":58.88,"Tatoeba (uig-eng)":92.4,"Tatoeba (ukr-eng)":93.97,"Tatoeba (urd-eng)":93.22,"Tatoeba (uzb-eng)":84.23,"Tatoeba (vie-eng)":97.2,"Tatoeba (war-eng)":60.29,"Tatoeba (wuu-eng)":90.18,"Tatoeba (xho-eng)":91.55,"Tatoeba (yid-eng)":88.79,"Tatoeba (yue-eng)":89.58,"Tatoeba (zsm-eng)":95.62} +{"Rank":2,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":67.42,"BUCC (de-en)":99.21,"BUCC (fr-en)":98.39,"BUCC (ru-en)":97.62,"BUCC (zh-en)":97.7,"Tatoeba (afr-eng)":92.59,"Tatoeba (amh-eng)":80.82,"Tatoeba (ang-eng)":25.22,"Tatoeba (ara-eng)":90.14,"Tatoeba (arq-eng)":26.63,"Tatoeba (arz-eng)":66.16,"Tatoeba (ast-eng)":76.35,"Tatoeba (awa-eng)":33.74,"Tatoeba (aze-eng)":82.41,"Tatoeba (bel-eng)":79.54,"Tatoeba (ben-eng)":89.43,"Tatoeba (ber-eng)":77.63,"Tatoeba (bos-eng)":95.86,"Tatoeba (bre-eng)":31.2,"Tatoeba (bul-eng)":93.57,"Tatoeba (cat-eng)":95.8,"Tatoeba (cbk-eng)":77.17,"Tatoeba (ceb-eng)":9.93,"Tatoeba (ces-eng)":95.52,"Tatoeba (cha-eng)":14.86,"Tatoeba (cmn-eng)":85.62,"Tatoeba (cor-eng)":4.45,"Tatoeba (csb-eng)":27.03,"Tatoeba (cym-eng)":5.85,"Tatoeba (dan-eng)":95.22,"Tatoeba (deu-eng)":99.07,"Tatoeba (dsb-eng)":42.34,"Tatoeba (dtp-eng)":7.39,"Tatoeba (ell-eng)":96.2,"Tatoeba (epo-eng)":96.61,"Tatoeba (est-eng)":96.43,"Tatoeba (eus-eng)":93.32,"Tatoeba (fao-eng)":57.04,"Tatoeba (fin-eng)":96.98,"Tatoeba (fra-eng)":94.28,"Tatoeba (fry-eng)":42.07,"Tatoeba (gla-eng)":1.52,"Tatoeba (gle-eng)":4.2,"Tatoeba (glg-eng)":96.14,"Tatoeba (gsw-eng)":27.52,"Tatoeba (heb-eng)":0.0,"Tatoeba (hin-eng)":95.32,"Tatoeba (hrv-eng)":96.72,"Tatoeba (hsb-eng)":45.75,"Tatoeba (hun-eng)":95.2,"Tatoeba (hye-eng)":88.72,"Tatoeba (ido-eng)":80.86,"Tatoeba (ile-eng)":87.88,"Tatoeba (ina-eng)":93.93,"Tatoeba (ind-eng)":92.98,"Tatoeba (isl-eng)":94.32,"Tatoeba (ita-eng)":94.32,"Tatoeba (jav-eng)":9.95,"Tatoeba (jpn-eng)":93.78,"Tatoeba (kab-eng)":65.88,"Tatoeba (kat-eng)":81.16,"Tatoeba (kaz-eng)":53.3,"Tatoeba (khm-eng)":74.19,"Tatoeba (kor-eng)":87.97,"Tatoeba (kur-eng)":19.09,"Tatoeba (kzj-eng)":4.46,"Tatoeba (lat-eng)":64.81,"Tatoeba (lfn-eng)":63.39,"Tatoeba (lit-eng)":96.2,"Tatoeba (lvs-eng)":95.33,"Tatoeba (mal-eng)":98.16,"Tatoeba (mar-eng)":92.93,"Tatoeba (max-eng)":36.96,"Tatoeba (mhr-eng)":6.86,"Tatoeba (mkd-eng)":93.63,"Tatoeba (mon-eng)":3.42,"Tatoeba (nds-eng)":77.13,"Tatoeba (nld-eng)":95.35,"Tatoeba (nno-eng)":72.75,"Tatoeba (nob-eng)":95.77,"Tatoeba (nov-eng)":60.02,"Tatoeba (oci-eng)":58.13,"Tatoeba (orv-eng)":23.24,"Tatoeba (pam-eng)":3.24,"Tatoeba (pes-eng)":93.13,"Tatoeba (pms-eng)":36.23,"Tatoeba (pol-eng)":97.32,"Tatoeba (por-eng)":94.54,"Tatoeba (ron-eng)":96.52,"Tatoeba (rus-eng)":92.58,"Tatoeba (slk-eng)":95.82,"Tatoeba (slv-eng)":95.4,"Tatoeba (spa-eng)":97.33,"Tatoeba (sqi-eng)":97.22,"Tatoeba (srp-eng)":93.64,"Tatoeba (swe-eng)":95.31,"Tatoeba (swg-eng)":33.1,"Tatoeba (swh-eng)":55.66,"Tatoeba (tam-eng)":87.32,"Tatoeba (tat-eng)":34.74,"Tatoeba (tel-eng)":96.72,"Tatoeba (tgl-eng)":63.19,"Tatoeba (tha-eng)":96.38,"Tatoeba (tuk-eng)":16.35,"Tatoeba (tur-eng)":98.03,"Tatoeba (tzl-eng)":36.56,"Tatoeba (uig-eng)":56.49,"Tatoeba (ukr-eng)":93.52,"Tatoeba (urd-eng)":84.23,"Tatoeba 
(uzb-eng)":23.2,"Tatoeba (vie-eng)":96.73,"Tatoeba (war-eng)":8.25,"Tatoeba (wuu-eng)":75.09,"Tatoeba (xho-eng)":4.68,"Tatoeba (yid-eng)":2.49,"Tatoeba (yue-eng)":87.75,"Tatoeba (zsm-eng)":95.41} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":63.38,"BUCC (de-en)":98.59,"BUCC (fr-en)":96.89,"BUCC (ru-en)":96.44,"BUCC (zh-en)":97.56,"Tatoeba (afr-eng)":72.96,"Tatoeba (amh-eng)":53.49,"Tatoeba (ang-eng)":16.72,"Tatoeba (ara-eng)":90.19,"Tatoeba (arq-eng)":19.84,"Tatoeba (arz-eng)":55.69,"Tatoeba (ast-eng)":70.08,"Tatoeba (awa-eng)":42.83,"Tatoeba (aze-eng)":76.36,"Tatoeba (bel-eng)":79.94,"Tatoeba (ben-eng)":64.9,"Tatoeba (ber-eng)":4.88,"Tatoeba (bos-eng)":94.02,"Tatoeba (bre-eng)":6.42,"Tatoeba (bul-eng)":93.52,"Tatoeba (cat-eng)":96.05,"Tatoeba (cbk-eng)":58.68,"Tatoeba (ceb-eng)":7.39,"Tatoeba (ces-eng)":95.73,"Tatoeba (cha-eng)":12.59,"Tatoeba (cmn-eng)":95.83,"Tatoeba (cor-eng)":3.53,"Tatoeba (csb-eng)":23.73,"Tatoeba (cym-eng)":22.31,"Tatoeba (dan-eng)":96.17,"Tatoeba (deu-eng)":97.73,"Tatoeba (dsb-eng)":36.85,"Tatoeba (dtp-eng)":5.03,"Tatoeba (ell-eng)":94.93,"Tatoeba (epo-eng)":55.12,"Tatoeba (est-eng)":98.4,"Tatoeba (eus-eng)":31.33,"Tatoeba (fao-eng)":38.24,"Tatoeba (fin-eng)":95.92,"Tatoeba (fra-eng)":93.12,"Tatoeba (fry-eng)":43.54,"Tatoeba (gla-eng)":4.72,"Tatoeba (gle-eng)":16.85,"Tatoeba (glg-eng)":95.32,"Tatoeba (gsw-eng)":25.12,"Tatoeba (heb-eng)":88.26,"Tatoeba (hin-eng)":97.75,"Tatoeba (hrv-eng)":97.0,"Tatoeba (hsb-eng)":44.32,"Tatoeba (hun-eng)":94.18,"Tatoeba (hye-eng)":94.38,"Tatoeba (ido-eng)":43.91,"Tatoeba (ile-eng)":60.36,"Tatoeba (ina-eng)":84.32,"Tatoeba (ind-eng)":93.5,"Tatoeba (isl-eng)":59.25,"Tatoeba (ita-eng)":93.76,"Tatoeba (jav-eng)":23.39,"Tatoeba (jpn-eng)":92.51,"Tatoeba (kab-eng)":1.41,"Tatoeba (kat-eng)":95.46,"Tatoeba (kaz-eng)":61.49,"Tatoeba (khm-eng)":58.8,"Tatoeba (kor-eng)":93.07,"Tatoeba (kur-eng)":61.44,"Tatoeba (kzj-eng)":5.88,"Tatoeba (lat-eng)":24.25,"Tatoeba (lfn-eng)":49.56,"Tatoeba (lit-eng)":95.37,"Tatoeba (lvs-eng)":97.53,"Tatoeba (mal-eng)":88.46,"Tatoeba (mar-eng)":93.83,"Tatoeba (max-eng)":48.77,"Tatoeba (mhr-eng)":7.57,"Tatoeba (mkd-eng)":93.02,"Tatoeba (mon-eng)":96.14,"Tatoeba (nds-eng)":38.88,"Tatoeba (nld-eng)":95.5,"Tatoeba (nno-eng)":81.41,"Tatoeba (nob-eng)":98.53,"Tatoeba (nov-eng)":50.23,"Tatoeba (oci-eng)":43.49,"Tatoeba (orv-eng)":23.77,"Tatoeba (pam-eng)":5.39,"Tatoeba (pes-eng)":93.47,"Tatoeba (pms-eng)":34.19,"Tatoeba (pol-eng)":96.95,"Tatoeba (por-eng)":93.02,"Tatoeba (ron-eng)":96.43,"Tatoeba (rus-eng)":92.92,"Tatoeba (slk-eng)":96.62,"Tatoeba (slv-eng)":97.08,"Tatoeba (spa-eng)":97.0,"Tatoeba (sqi-eng)":98.57,"Tatoeba (srp-eng)":94.12,"Tatoeba (swe-eng)":95.45,"Tatoeba (swg-eng)":22.8,"Tatoeba (swh-eng)":16.02,"Tatoeba (tam-eng)":73.6,"Tatoeba (tat-eng)":10.89,"Tatoeba (tel-eng)":79.73,"Tatoeba (tgl-eng)":17.67,"Tatoeba (tha-eng)":95.99,"Tatoeba (tuk-eng)":14.91,"Tatoeba (tur-eng)":96.17,"Tatoeba (tzl-eng)":34.21,"Tatoeba (uig-eng)":48.35,"Tatoeba (ukr-eng)":92.67,"Tatoeba (urd-eng)":95.12,"Tatoeba (uzb-eng)":23.19,"Tatoeba (vie-eng)":97.23,"Tatoeba (war-eng)":7.42,"Tatoeba (wuu-eng)":78.25,"Tatoeba (xho-eng)":6.53,"Tatoeba (yid-eng)":30.73,"Tatoeba (yue-eng)":77.58,"Tatoeba (zsm-eng)":95.8} +{"Rank":4,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":57.98,"BUCC (de-en)":97.11,"BUCC (fr-en)":94.99,"BUCC (ru-en)":95.06,"BUCC 
(zh-en)":95.63,"Tatoeba (afr-eng)":58.22,"Tatoeba (amh-eng)":36.21,"Tatoeba (ang-eng)":10.24,"Tatoeba (ara-eng)":87.93,"Tatoeba (arq-eng)":18.6,"Tatoeba (arz-eng)":51.26,"Tatoeba (ast-eng)":62.17,"Tatoeba (awa-eng)":33.43,"Tatoeba (aze-eng)":62.1,"Tatoeba (bel-eng)":67.73,"Tatoeba (ben-eng)":36.48,"Tatoeba (ber-eng)":4.43,"Tatoeba (bos-eng)":93.27,"Tatoeba (bre-eng)":5.56,"Tatoeba (bul-eng)":92.65,"Tatoeba (cat-eng)":94.42,"Tatoeba (cbk-eng)":55.37,"Tatoeba (ceb-eng)":8.05,"Tatoeba (ces-eng)":95.12,"Tatoeba (cha-eng)":15.98,"Tatoeba (cmn-eng)":94.93,"Tatoeba (cor-eng)":3.42,"Tatoeba (csb-eng)":21.56,"Tatoeba (cym-eng)":13.25,"Tatoeba (dan-eng)":94.8,"Tatoeba (deu-eng)":97.02,"Tatoeba (dsb-eng)":33.43,"Tatoeba (dtp-eng)":5.69,"Tatoeba (ell-eng)":95.43,"Tatoeba (epo-eng)":41.73,"Tatoeba (est-eng)":97.33,"Tatoeba (eus-eng)":23.18,"Tatoeba (fao-eng)":27.51,"Tatoeba (fin-eng)":93.1,"Tatoeba (fra-eng)":91.72,"Tatoeba (fry-eng)":31.13,"Tatoeba (gla-eng)":3.61,"Tatoeba (gle-eng)":11.62,"Tatoeba (glg-eng)":94.0,"Tatoeba (gsw-eng)":25.74,"Tatoeba (heb-eng)":86.88,"Tatoeba (hin-eng)":97.62,"Tatoeba (hrv-eng)":95.98,"Tatoeba (hsb-eng)":36.1,"Tatoeba (hun-eng)":91.58,"Tatoeba (hye-eng)":93.28,"Tatoeba (ido-eng)":40.25,"Tatoeba (ile-eng)":57.71,"Tatoeba (ina-eng)":79.13,"Tatoeba (ind-eng)":92.74,"Tatoeba (isl-eng)":24.07,"Tatoeba (ita-eng)":93.05,"Tatoeba (jav-eng)":17.04,"Tatoeba (jpn-eng)":90.41,"Tatoeba (kab-eng)":1.16,"Tatoeba (kat-eng)":95.44,"Tatoeba (kaz-eng)":34.89,"Tatoeba (khm-eng)":32.11,"Tatoeba (kor-eng)":92.52,"Tatoeba (kur-eng)":46.94,"Tatoeba (kzj-eng)":6.24,"Tatoeba (lat-eng)":19.47,"Tatoeba (lfn-eng)":47.02,"Tatoeba (lit-eng)":93.16,"Tatoeba (lvs-eng)":97.87,"Tatoeba (mal-eng)":32.2,"Tatoeba (mar-eng)":92.38,"Tatoeba (max-eng)":45.25,"Tatoeba (mhr-eng)":6.89,"Tatoeba (mkd-eng)":91.0,"Tatoeba (mon-eng)":95.04,"Tatoeba (nds-eng)":32.16,"Tatoeba (nld-eng)":94.58,"Tatoeba (nno-eng)":76.34,"Tatoeba (nob-eng)":97.73,"Tatoeba (nov-eng)":47.99,"Tatoeba (oci-eng)":38.57,"Tatoeba (orv-eng)":15.1,"Tatoeba (pam-eng)":5.41,"Tatoeba (pes-eng)":92.59,"Tatoeba (pms-eng)":30.7,"Tatoeba (pol-eng)":94.28,"Tatoeba (por-eng)":92.13,"Tatoeba (ron-eng)":95.3,"Tatoeba (rus-eng)":91.87,"Tatoeba (slk-eng)":95.15,"Tatoeba (slv-eng)":96.92,"Tatoeba (spa-eng)":95.42,"Tatoeba (sqi-eng)":98.17,"Tatoeba (srp-eng)":92.24,"Tatoeba (swe-eng)":94.42,"Tatoeba (swg-eng)":26.31,"Tatoeba (swh-eng)":14.48,"Tatoeba (tam-eng)":24.64,"Tatoeba (tat-eng)":10.25,"Tatoeba (tel-eng)":36.4,"Tatoeba (tgl-eng)":13.09,"Tatoeba (tha-eng)":96.72,"Tatoeba (tuk-eng)":15.16,"Tatoeba (tur-eng)":95.08,"Tatoeba (tzl-eng)":25.46,"Tatoeba (uig-eng)":24.39,"Tatoeba (ukr-eng)":92.82,"Tatoeba (urd-eng)":94.57,"Tatoeba (uzb-eng)":17.14,"Tatoeba (vie-eng)":95.12,"Tatoeba (war-eng)":7.25,"Tatoeba (wuu-eng)":76.0,"Tatoeba (xho-eng)":4.52,"Tatoeba (yid-eng)":14.38,"Tatoeba (yue-eng)":71.45,"Tatoeba (zsm-eng)":95.31} +{"Rank":5,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":21.98,"BUCC (de-en)":95.04,"BUCC (fr-en)":94.96,"BUCC (ru-en)":8.33,"BUCC (zh-en)":1.3,"Tatoeba (afr-eng)":41.84,"Tatoeba (amh-eng)":0.03,"Tatoeba (ang-eng)":37.87,"Tatoeba (ara-eng)":0.61,"Tatoeba (arq-eng)":0.74,"Tatoeba (arz-eng)":0.42,"Tatoeba (ast-eng)":65.41,"Tatoeba (awa-eng)":1.46,"Tatoeba (aze-eng)":8.79,"Tatoeba (bel-eng)":5.76,"Tatoeba (ben-eng)":0.01,"Tatoeba (ber-eng)":5.92,"Tatoeba (bos-eng)":16.12,"Tatoeba (bre-eng)":6.12,"Tatoeba (bul-eng)":9.06,"Tatoeba (cat-eng)":57.4,"Tatoeba (cbk-eng)":57.68,"Tatoeba 
(ceb-eng)":12.56,"Tatoeba (ces-eng)":9.47,"Tatoeba (cha-eng)":27.13,"Tatoeba (cmn-eng)":1.82,"Tatoeba (cor-eng)":3.87,"Tatoeba (csb-eng)":14.41,"Tatoeba (cym-eng)":6.69,"Tatoeba (dan-eng)":54.87,"Tatoeba (deu-eng)":93.72,"Tatoeba (dsb-eng)":14.74,"Tatoeba (dtp-eng)":5.84,"Tatoeba (ell-eng)":0.6,"Tatoeba (epo-eng)":30.8,"Tatoeba (est-eng)":5.39,"Tatoeba (eus-eng)":11.9,"Tatoeba (fao-eng)":28.08,"Tatoeba (fin-eng)":6.81,"Tatoeba (fra-eng)":85.29,"Tatoeba (fry-eng)":38.68,"Tatoeba (gla-eng)":2.96,"Tatoeba (gle-eng)":3.74,"Tatoeba (glg-eng)":70.0,"Tatoeba (gsw-eng)":30.49,"Tatoeba (heb-eng)":0.87,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":17.43,"Tatoeba (hsb-eng)":14.69,"Tatoeba (hun-eng)":7.28,"Tatoeba (hye-eng)":0.77,"Tatoeba (ido-eng)":46.65,"Tatoeba (ile-eng)":59.43,"Tatoeba (ina-eng)":82.71,"Tatoeba (ind-eng)":37.26,"Tatoeba (isl-eng)":11.21,"Tatoeba (ita-eng)":79.77,"Tatoeba (jav-eng)":7.81,"Tatoeba (jpn-eng)":0.91,"Tatoeba (kab-eng)":2.23,"Tatoeba (kat-eng)":1.48,"Tatoeba (kaz-eng)":1.77,"Tatoeba (khm-eng)":0.38,"Tatoeba (kor-eng)":1.96,"Tatoeba (kur-eng)":12.11,"Tatoeba (kzj-eng)":6.13,"Tatoeba (lat-eng)":27.84,"Tatoeba (lfn-eng)":45.89,"Tatoeba (lit-eng)":5.94,"Tatoeba (lvs-eng)":8.11,"Tatoeba (mal-eng)":0.59,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":21.7,"Tatoeba (mhr-eng)":0.68,"Tatoeba (mkd-eng)":5.92,"Tatoeba (mon-eng)":2.39,"Tatoeba (nds-eng)":45.04,"Tatoeba (nld-eng)":64.75,"Tatoeba (nno-eng)":36.74,"Tatoeba (nob-eng)":54.77,"Tatoeba (nov-eng)":57.12,"Tatoeba (oci-eng)":34.39,"Tatoeba (orv-eng)":2.04,"Tatoeba (pam-eng)":8.34,"Tatoeba (pes-eng)":0.87,"Tatoeba (pms-eng)":38.06,"Tatoeba (pol-eng)":28.35,"Tatoeba (por-eng)":83.61,"Tatoeba (ron-eng)":65.27,"Tatoeba (rus-eng)":30.42,"Tatoeba (slk-eng)":13.19,"Tatoeba (slv-eng)":13.49,"Tatoeba (spa-eng)":89.18,"Tatoeba (sqi-eng)":14.66,"Tatoeba (srp-eng)":13.24,"Tatoeba (swe-eng)":60.67,"Tatoeba (swg-eng)":34.76,"Tatoeba (swh-eng)":8.07,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.46,"Tatoeba (tel-eng)":0.67,"Tatoeba (tgl-eng)":25.22,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.99,"Tatoeba (tur-eng)":7.72,"Tatoeba (tzl-eng)":38.49,"Tatoeba (uig-eng)":0.87,"Tatoeba (ukr-eng)":9.12,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":5.48,"Tatoeba (vie-eng)":8.45,"Tatoeba (war-eng)":13.75,"Tatoeba (wuu-eng)":1.44,"Tatoeba (xho-eng)":9.15,"Tatoeba (yid-eng)":0.28,"Tatoeba (yue-eng)":0.98,"Tatoeba (zsm-eng)":35.71} +{"Rank":6,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":19.48,"BUCC (de-en)":90.99,"BUCC (fr-en)":88.55,"BUCC (ru-en)":2.07,"BUCC (zh-en)":1.49,"Tatoeba (afr-eng)":33.47,"Tatoeba (amh-eng)":0.01,"Tatoeba (ang-eng)":30.74,"Tatoeba (ara-eng)":0.47,"Tatoeba (arq-eng)":0.34,"Tatoeba (arz-eng)":0.14,"Tatoeba (ast-eng)":51.74,"Tatoeba (awa-eng)":0.49,"Tatoeba (aze-eng)":7.43,"Tatoeba (bel-eng)":3.45,"Tatoeba (ben-eng)":0.06,"Tatoeba (ber-eng)":5.79,"Tatoeba (bos-eng)":17.43,"Tatoeba (bre-eng)":5.69,"Tatoeba (bul-eng)":7.55,"Tatoeba (cat-eng)":48.06,"Tatoeba (cbk-eng)":54.56,"Tatoeba (ceb-eng)":8.72,"Tatoeba (ces-eng)":8.76,"Tatoeba (cha-eng)":27.56,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":3.69,"Tatoeba (csb-eng)":13.18,"Tatoeba (cym-eng)":6.97,"Tatoeba (dan-eng)":47.36,"Tatoeba (deu-eng)":91.54,"Tatoeba (dsb-eng)":13.2,"Tatoeba (dtp-eng)":4.54,"Tatoeba (ell-eng)":0.55,"Tatoeba (epo-eng)":27.86,"Tatoeba (est-eng)":5.13,"Tatoeba (eus-eng)":10.23,"Tatoeba (fao-eng)":21.44,"Tatoeba (fin-eng)":6.62,"Tatoeba (fra-eng)":79.66,"Tatoeba (fry-eng)":32.92,"Tatoeba 
(gla-eng)":2.87,"Tatoeba (gle-eng)":3.26,"Tatoeba (glg-eng)":63.81,"Tatoeba (gsw-eng)":29.71,"Tatoeba (heb-eng)":0.33,"Tatoeba (hin-eng)":0.25,"Tatoeba (hrv-eng)":17.16,"Tatoeba (hsb-eng)":12.02,"Tatoeba (hun-eng)":7.21,"Tatoeba (hye-eng)":0.78,"Tatoeba (ido-eng)":40.83,"Tatoeba (ile-eng)":54.95,"Tatoeba (ina-eng)":72.28,"Tatoeba (ind-eng)":30.95,"Tatoeba (isl-eng)":11.29,"Tatoeba (ita-eng)":73.83,"Tatoeba (jav-eng)":8.66,"Tatoeba (jpn-eng)":0.61,"Tatoeba (kab-eng)":1.78,"Tatoeba (kat-eng)":0.79,"Tatoeba (kaz-eng)":0.95,"Tatoeba (khm-eng)":0.49,"Tatoeba (kor-eng)":1.87,"Tatoeba (kur-eng)":10.91,"Tatoeba (kzj-eng)":5.72,"Tatoeba (lat-eng)":18.24,"Tatoeba (lfn-eng)":43.49,"Tatoeba (lit-eng)":7.13,"Tatoeba (lvs-eng)":7.04,"Tatoeba (mal-eng)":0.44,"Tatoeba (mar-eng)":0.03,"Tatoeba (max-eng)":18.99,"Tatoeba (mhr-eng)":1.11,"Tatoeba (mkd-eng)":2.49,"Tatoeba (mon-eng)":2.01,"Tatoeba (nds-eng)":39.96,"Tatoeba (nld-eng)":58.86,"Tatoeba (nno-eng)":29.07,"Tatoeba (nob-eng)":40.25,"Tatoeba (nov-eng)":50.19,"Tatoeba (oci-eng)":30.72,"Tatoeba (orv-eng)":0.85,"Tatoeba (pam-eng)":7.21,"Tatoeba (pes-eng)":0.53,"Tatoeba (pms-eng)":31.07,"Tatoeba (pol-eng)":18.06,"Tatoeba (por-eng)":81.92,"Tatoeba (ron-eng)":62.6,"Tatoeba (rus-eng)":22.24,"Tatoeba (slk-eng)":10.59,"Tatoeba (slv-eng)":11.4,"Tatoeba (spa-eng)":85.78,"Tatoeba (sqi-eng)":14.92,"Tatoeba (srp-eng)":9.87,"Tatoeba (swe-eng)":55.08,"Tatoeba (swg-eng)":32.66,"Tatoeba (swh-eng)":7.64,"Tatoeba (tam-eng)":0.49,"Tatoeba (tat-eng)":1.28,"Tatoeba (tel-eng)":0.45,"Tatoeba (tgl-eng)":23.63,"Tatoeba (tha-eng)":0.61,"Tatoeba (tuk-eng)":5.71,"Tatoeba (tur-eng)":8.25,"Tatoeba (tzl-eng)":28.4,"Tatoeba (uig-eng)":0.57,"Tatoeba (ukr-eng)":5.69,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.19,"Tatoeba (vie-eng)":9.07,"Tatoeba (war-eng)":12.31,"Tatoeba (wuu-eng)":1.38,"Tatoeba (xho-eng)":7.6,"Tatoeba (yid-eng)":0.41,"Tatoeba (yue-eng)":1.31,"Tatoeba (zsm-eng)":29.74} +{"Rank":7,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":17.26,"BUCC (de-en)":87.0,"BUCC (fr-en)":88.91,"BUCC (ru-en)":0.44,"BUCC (zh-en)":0.95,"Tatoeba (afr-eng)":23.7,"Tatoeba (amh-eng)":0.65,"Tatoeba (ang-eng)":30.98,"Tatoeba (ara-eng)":0.48,"Tatoeba (arq-eng)":0.68,"Tatoeba (arz-eng)":0.22,"Tatoeba (ast-eng)":55.3,"Tatoeba (awa-eng)":1.03,"Tatoeba (aze-eng)":5.83,"Tatoeba (bel-eng)":1.66,"Tatoeba (ben-eng)":0.0,"Tatoeba (ber-eng)":5.62,"Tatoeba (bos-eng)":12.23,"Tatoeba (bre-eng)":5.84,"Tatoeba (bul-eng)":1.35,"Tatoeba (cat-eng)":48.56,"Tatoeba (cbk-eng)":46.97,"Tatoeba (ceb-eng)":9.79,"Tatoeba (ces-eng)":6.0,"Tatoeba (cha-eng)":24.21,"Tatoeba (cmn-eng)":2.26,"Tatoeba (cor-eng)":4.03,"Tatoeba (csb-eng)":9.53,"Tatoeba (cym-eng)":9.17,"Tatoeba (dan-eng)":34.63,"Tatoeba (deu-eng)":89.31,"Tatoeba (dsb-eng)":9.68,"Tatoeba (dtp-eng)":4.66,"Tatoeba (ell-eng)":0.77,"Tatoeba (epo-eng)":26.88,"Tatoeba (est-eng)":5.19,"Tatoeba (eus-eng)":9.46,"Tatoeba (fao-eng)":21.59,"Tatoeba (fin-eng)":5.66,"Tatoeba (fra-eng)":79.71,"Tatoeba (fry-eng)":28.29,"Tatoeba (gla-eng)":2.34,"Tatoeba (gle-eng)":3.55,"Tatoeba (glg-eng)":56.25,"Tatoeba (gsw-eng)":24.25,"Tatoeba (heb-eng)":0.57,"Tatoeba (hin-eng)":0.12,"Tatoeba (hrv-eng)":10.29,"Tatoeba (hsb-eng)":9.52,"Tatoeba (hun-eng)":6.22,"Tatoeba (hye-eng)":0.81,"Tatoeba (ido-eng)":41.11,"Tatoeba (ile-eng)":54.0,"Tatoeba (ina-eng)":75.47,"Tatoeba (ind-eng)":13.02,"Tatoeba (isl-eng)":8.98,"Tatoeba (ita-eng)":67.23,"Tatoeba (jav-eng)":8.54,"Tatoeba (jpn-eng)":0.99,"Tatoeba (kab-eng)":1.85,"Tatoeba 
(kat-eng)":1.37,"Tatoeba (kaz-eng)":0.67,"Tatoeba (khm-eng)":0.56,"Tatoeba (kor-eng)":1.73,"Tatoeba (kur-eng)":9.23,"Tatoeba (kzj-eng)":5.38,"Tatoeba (lat-eng)":21.3,"Tatoeba (lfn-eng)":40.48,"Tatoeba (lit-eng)":5.38,"Tatoeba (lvs-eng)":6.83,"Tatoeba (mal-eng)":0.45,"Tatoeba (mar-eng)":0.01,"Tatoeba (max-eng)":16.44,"Tatoeba (mhr-eng)":0.33,"Tatoeba (mkd-eng)":0.4,"Tatoeba (mon-eng)":2.48,"Tatoeba (nds-eng)":34.66,"Tatoeba (nld-eng)":42.72,"Tatoeba (nno-eng)":24.08,"Tatoeba (nob-eng)":34.17,"Tatoeba (nov-eng)":55.01,"Tatoeba (oci-eng)":29.15,"Tatoeba (orv-eng)":0.2,"Tatoeba (pam-eng)":6.99,"Tatoeba (pes-eng)":0.9,"Tatoeba (pms-eng)":30.8,"Tatoeba (pol-eng)":12.81,"Tatoeba (por-eng)":73.45,"Tatoeba (ron-eng)":54.86,"Tatoeba (rus-eng)":2.43,"Tatoeba (slk-eng)":8.35,"Tatoeba (slv-eng)":9.3,"Tatoeba (spa-eng)":78.87,"Tatoeba (sqi-eng)":11.74,"Tatoeba (srp-eng)":5.83,"Tatoeba (swe-eng)":35.41,"Tatoeba (swg-eng)":28.18,"Tatoeba (swh-eng)":7.53,"Tatoeba (tam-eng)":0.36,"Tatoeba (tat-eng)":1.01,"Tatoeba (tel-eng)":1.1,"Tatoeba (tgl-eng)":12.4,"Tatoeba (tha-eng)":1.58,"Tatoeba (tuk-eng)":4.95,"Tatoeba (tur-eng)":6.45,"Tatoeba (tzl-eng)":37.82,"Tatoeba (uig-eng)":0.67,"Tatoeba (ukr-eng)":1.88,"Tatoeba (urd-eng)":0.0,"Tatoeba (uzb-eng)":4.79,"Tatoeba (vie-eng)":7.03,"Tatoeba (war-eng)":9.68,"Tatoeba (wuu-eng)":1.28,"Tatoeba (xho-eng)":10.64,"Tatoeba (yid-eng)":0.57,"Tatoeba (yue-eng)":0.88,"Tatoeba (zsm-eng)":14.67} +{"Rank":8,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":4.54,"BUCC (de-en)":0.18,"BUCC (fr-en)":0.08,"BUCC (ru-en)":0.15,"BUCC (zh-en)":0.05,"Tatoeba (afr-eng)":4.82,"Tatoeba (amh-eng)":1.18,"Tatoeba (ang-eng)":8.54,"Tatoeba (ara-eng)":0.63,"Tatoeba (arq-eng)":0.4,"Tatoeba (arz-eng)":0.63,"Tatoeba (ast-eng)":11.69,"Tatoeba (awa-eng)":0.0,"Tatoeba (aze-eng)":3.22,"Tatoeba (bel-eng)":1.75,"Tatoeba (ben-eng)":0.2,"Tatoeba (ber-eng)":7.0,"Tatoeba (bos-eng)":9.31,"Tatoeba (bre-eng)":4.17,"Tatoeba (bul-eng)":1.29,"Tatoeba (cat-eng)":7.73,"Tatoeba (cbk-eng)":5.61,"Tatoeba (ceb-eng)":4.88,"Tatoeba (ces-eng)":3.55,"Tatoeba (cha-eng)":19.29,"Tatoeba (cmn-eng)":0.5,"Tatoeba (cor-eng)":4.15,"Tatoeba (csb-eng)":5.69,"Tatoeba (cym-eng)":8.4,"Tatoeba (dan-eng)":6.99,"Tatoeba (deu-eng)":3.67,"Tatoeba (dsb-eng)":5.33,"Tatoeba (dtp-eng)":4.25,"Tatoeba (ell-eng)":0.63,"Tatoeba (epo-eng)":2.45,"Tatoeba (est-eng)":2.69,"Tatoeba (eus-eng)":4.69,"Tatoeba (fao-eng)":7.61,"Tatoeba (fin-eng)":3.36,"Tatoeba (fra-eng)":7.0,"Tatoeba (fry-eng)":12.36,"Tatoeba (gla-eng)":3.07,"Tatoeba (gle-eng)":4.81,"Tatoeba (glg-eng)":8.12,"Tatoeba (gsw-eng)":18.87,"Tatoeba (heb-eng)":0.68,"Tatoeba (hin-eng)":0.1,"Tatoeba (hrv-eng)":5.41,"Tatoeba (hsb-eng)":6.32,"Tatoeba (hun-eng)":3.42,"Tatoeba (hye-eng)":0.97,"Tatoeba (ido-eng)":7.1,"Tatoeba (ile-eng)":13.61,"Tatoeba (ina-eng)":8.57,"Tatoeba (ind-eng)":7.26,"Tatoeba (isl-eng)":4.09,"Tatoeba (ita-eng)":5.54,"Tatoeba (jav-eng)":11.43,"Tatoeba (jpn-eng)":0.2,"Tatoeba (kab-eng)":2.71,"Tatoeba (kat-eng)":1.11,"Tatoeba (kaz-eng)":1.17,"Tatoeba (khm-eng)":0.55,"Tatoeba (kor-eng)":0.5,"Tatoeba (kur-eng)":8.55,"Tatoeba (kzj-eng)":4.61,"Tatoeba (lat-eng)":4.07,"Tatoeba (lfn-eng)":2.83,"Tatoeba (lit-eng)":0.95,"Tatoeba (lvs-eng)":3.25,"Tatoeba (mal-eng)":0.29,"Tatoeba (mar-eng)":0.2,"Tatoeba (max-eng)":14.53,"Tatoeba (mhr-eng)":0.2,"Tatoeba (mkd-eng)":0.2,"Tatoeba (mon-eng)":1.1,"Tatoeba (nds-eng)":10.37,"Tatoeba (nld-eng)":9.5,"Tatoeba (nno-eng)":4.49,"Tatoeba (nob-eng)":4.95,"Tatoeba (nov-eng)":14.53,"Tatoeba (oci-eng)":5.8,"Tatoeba 
(orv-eng)":0.24,"Tatoeba (pam-eng)":6.65,"Tatoeba (pes-eng)":0.5,"Tatoeba (pms-eng)":8.05,"Tatoeba (pol-eng)":5.13,"Tatoeba (por-eng)":5.87,"Tatoeba (ron-eng)":6.76,"Tatoeba (rus-eng)":0.2,"Tatoeba (slk-eng)":4.23,"Tatoeba (slv-eng)":6.05,"Tatoeba (spa-eng)":5.03,"Tatoeba (sqi-eng)":4.36,"Tatoeba (srp-eng)":1.77,"Tatoeba (swe-eng)":6.72,"Tatoeba (swg-eng)":8.54,"Tatoeba (swh-eng)":11.49,"Tatoeba (tam-eng)":1.3,"Tatoeba (tat-eng)":0.77,"Tatoeba (tel-eng)":0.85,"Tatoeba (tgl-eng)":2.61,"Tatoeba (tha-eng)":0.69,"Tatoeba (tuk-eng)":5.76,"Tatoeba (tur-eng)":5.24,"Tatoeba (tzl-eng)":15.51,"Tatoeba (uig-eng)":0.6,"Tatoeba (ukr-eng)":1.23,"Tatoeba (urd-eng)":0.4,"Tatoeba (uzb-eng)":4.73,"Tatoeba (vie-eng)":6.55,"Tatoeba (war-eng)":4.12,"Tatoeba (wuu-eng)":0.2,"Tatoeba (xho-eng)":4.33,"Tatoeba (yid-eng)":0.59,"Tatoeba (yue-eng)":0.5,"Tatoeba (zsm-eng)":7.27} +{"Rank":9,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","BUCC (de-en)":0.18,"BUCC (fr-en)":0.19,"BUCC (ru-en)":0.1,"BUCC (zh-en)":0.0,"Tatoeba (afr-eng)":"","Tatoeba (amh-eng)":"","Tatoeba (ang-eng)":"","Tatoeba (ara-eng)":"","Tatoeba (arq-eng)":"","Tatoeba (arz-eng)":"","Tatoeba (ast-eng)":"","Tatoeba (awa-eng)":"","Tatoeba (aze-eng)":"","Tatoeba (bel-eng)":"","Tatoeba (ben-eng)":"","Tatoeba (ber-eng)":"","Tatoeba (bos-eng)":"","Tatoeba (bre-eng)":"","Tatoeba (bul-eng)":"","Tatoeba (cat-eng)":"","Tatoeba (cbk-eng)":"","Tatoeba (ceb-eng)":"","Tatoeba (ces-eng)":"","Tatoeba (cha-eng)":"","Tatoeba (cmn-eng)":"","Tatoeba (cor-eng)":"","Tatoeba (csb-eng)":"","Tatoeba (cym-eng)":"","Tatoeba (dan-eng)":"","Tatoeba (deu-eng)":"","Tatoeba (dsb-eng)":"","Tatoeba (dtp-eng)":"","Tatoeba (ell-eng)":"","Tatoeba (epo-eng)":"","Tatoeba (est-eng)":"","Tatoeba (eus-eng)":"","Tatoeba (fao-eng)":"","Tatoeba (fin-eng)":"","Tatoeba (fra-eng)":"","Tatoeba (fry-eng)":"","Tatoeba (gla-eng)":"","Tatoeba (gle-eng)":"","Tatoeba (glg-eng)":"","Tatoeba (gsw-eng)":"","Tatoeba (heb-eng)":"","Tatoeba (hin-eng)":"","Tatoeba (hrv-eng)":"","Tatoeba (hsb-eng)":"","Tatoeba (hun-eng)":"","Tatoeba (hye-eng)":"","Tatoeba (ido-eng)":"","Tatoeba (ile-eng)":"","Tatoeba (ina-eng)":"","Tatoeba (ind-eng)":"","Tatoeba (isl-eng)":"","Tatoeba (ita-eng)":"","Tatoeba (jav-eng)":"","Tatoeba (jpn-eng)":"","Tatoeba (kab-eng)":"","Tatoeba (kat-eng)":"","Tatoeba (kaz-eng)":"","Tatoeba (khm-eng)":"","Tatoeba (kor-eng)":"","Tatoeba (kur-eng)":"","Tatoeba (kzj-eng)":"","Tatoeba (lat-eng)":"","Tatoeba (lfn-eng)":"","Tatoeba (lit-eng)":"","Tatoeba (lvs-eng)":"","Tatoeba (mal-eng)":"","Tatoeba (mar-eng)":"","Tatoeba (max-eng)":"","Tatoeba (mhr-eng)":"","Tatoeba (mkd-eng)":"","Tatoeba (mon-eng)":"","Tatoeba (nds-eng)":"","Tatoeba (nld-eng)":"","Tatoeba (nno-eng)":"","Tatoeba (nob-eng)":"","Tatoeba (nov-eng)":"","Tatoeba (oci-eng)":"","Tatoeba (orv-eng)":"","Tatoeba (pam-eng)":"","Tatoeba (pes-eng)":"","Tatoeba (pms-eng)":"","Tatoeba (pol-eng)":"","Tatoeba (por-eng)":"","Tatoeba (ron-eng)":"","Tatoeba (rus-eng)":"","Tatoeba (slk-eng)":"","Tatoeba (slv-eng)":"","Tatoeba (spa-eng)":"","Tatoeba (sqi-eng)":"","Tatoeba (srp-eng)":"","Tatoeba (swe-eng)":"","Tatoeba (swg-eng)":"","Tatoeba (swh-eng)":"","Tatoeba (tam-eng)":"","Tatoeba (tat-eng)":"","Tatoeba (tel-eng)":"","Tatoeba (tgl-eng)":"","Tatoeba (tha-eng)":"","Tatoeba (tuk-eng)":"","Tatoeba (tur-eng)":"","Tatoeba (tzl-eng)":"","Tatoeba (uig-eng)":"","Tatoeba (ukr-eng)":"","Tatoeba (urd-eng)":"","Tatoeba (uzb-eng)":"","Tatoeba (vie-eng)":"","Tatoeba (war-eng)":"","Tatoeba (wuu-eng)":"","Tatoeba 
(xho-eng)":"","Tatoeba (yid-eng)":"","Tatoeba (yue-eng)":"","Tatoeba (zsm-eng)":""} diff --git a/boards_data/en/data_overall/default.jsonl b/boards_data/en/data_overall/default.jsonl index b25b4473841cca8df33fd4954e4a20e080c6442e..435c55812275f7ecd599ff7559664ee3823e7917 100644 --- a/boards_data/en/data_overall/default.jsonl +++ b/boards_data/en/data_overall/default.jsonl @@ -1,242 +1,109 @@ -{"index":21,"Rank":1,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":71.67,"Classification Average (12 datasets)":88.95,"Clustering Average (11 datasets)":57.89,"PairClassification Average (3 datasets)":88.14,"Reranking Average (4 datasets)":59.86,"Retrieval Average (15 datasets)":62.16,"STS Average (10 datasets)":84.24,"Summarization Average (1 datasets)":30.77} -{"index":138,"Rank":2,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":71.19,"Classification Average (12 datasets)":87.63,"Clustering Average (11 datasets)":57.69,"PairClassification Average (3 datasets)":88.07,"Reranking Average (4 datasets)":61.21,"Retrieval Average (15 datasets)":61.01,"STS Average (10 datasets)":84.51,"Summarization Average (1 datasets)":31.49} -{"index":95,"Rank":3,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":70.31,"Classification Average (12 datasets)":89.05,"Clustering Average (11 datasets)":56.17,"PairClassification Average (3 datasets)":88.07,"Reranking Average (4 datasets)":60.14,"Retrieval Average (15 datasets)":60.18,"STS Average (10 datasets)":81.26,"Summarization Average (1 datasets)":30.71} -{"index":17,"Rank":4,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Embedding Dimensions":3584,"Max Tokens":32768,"Average (56 datasets)":70.24,"Classification Average (12 datasets)":86.58,"Clustering Average (11 datasets)":56.92,"PairClassification Average (3 datasets)":85.79,"Reranking Average (4 datasets)":61.42,"Retrieval Average (15 datasets)":60.25,"STS Average (10 datasets)":83.04,"Summarization Average (1 datasets)":31.35} -{"index":205,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":70.24,"Classification Average (12 datasets)":86.58,"Clustering Average (11 datasets)":56.92,"PairClassification Average (3 datasets)":85.79,"Reranking Average (4 datasets)":61.42,"Retrieval Average (15 datasets)":60.25,"STS Average (10 datasets)":83.04,"Summarization Average (1 datasets)":31.35} -{"index":126,"Rank":6,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":70.24,"Classification Average (12 datasets)":86.58,"Clustering Average (11 datasets)":56.92,"PairClassification Average (3 datasets)":85.79,"Reranking Average (4 datasets)":61.42,"Retrieval Average (15 datasets)":60.25,"STS Average (10 datasets)":83.04,"Summarization Average (1 datasets)":31.35} -{"index":139,"Rank":7,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":70.11,"Classification 
Average (12 datasets)":86.67,"Clustering Average (11 datasets)":56.7,"PairClassification Average (3 datasets)":87.74,"Reranking Average (4 datasets)":60.16,"Retrieval Average (15 datasets)":58.97,"STS Average (10 datasets)":84.22,"Summarization Average (1 datasets)":31.66} -{"index":51,"Rank":8,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":69.94,"Classification Average (12 datasets)":87.91,"Clustering Average (11 datasets)":54.32,"PairClassification Average (3 datasets)":87.68,"Reranking Average (4 datasets)":61.49,"Retrieval Average (15 datasets)":58.12,"STS Average (10 datasets)":85.24,"Summarization Average (1 datasets)":30.87} -{"index":23,"Rank":9,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":69.32,"Classification Average (12 datasets)":88.08,"Clustering Average (11 datasets)":54.65,"PairClassification Average (3 datasets)":85.84,"Reranking Average (4 datasets)":59.72,"Retrieval Average (15 datasets)":59.24,"STS Average (10 datasets)":83.88,"Summarization Average (1 datasets)":0.31} -{"index":215,"Rank":10,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":69.32,"Classification Average (12 datasets)":87.35,"Clustering Average (11 datasets)":52.8,"PairClassification Average (3 datasets)":86.91,"Reranking Average (4 datasets)":60.54,"Retrieval Average (15 datasets)":59.36,"STS Average (10 datasets)":82.84,"Summarization Average (1 datasets)":31.2} -{"index":6,"Rank":11,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (56 datasets)":68.23,"Classification Average (12 datasets)":81.49,"Clustering Average (11 datasets)":53.35,"PairClassification Average (3 datasets)":89.24,"Reranking Average (4 datasets)":60.09,"Retrieval Average (15 datasets)":58.28,"STS Average (10 datasets)":84.31,"Summarization Average (1 datasets)":30.84} -{"index":58,"Rank":12,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":68.17,"Classification Average (12 datasets)":80.2,"Clustering Average (11 datasets)":51.42,"PairClassification Average (3 datasets)":88.35,"Reranking Average (4 datasets)":60.29,"Retrieval Average (15 datasets)":60.19,"STS Average (10 datasets)":84.97,"Summarization Average (1 datasets)":30.98} -{"index":96,"Rank":13,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":67.56,"Classification Average (12 datasets)":78.33,"Clustering Average (11 datasets)":51.67,"PairClassification Average (3 datasets)":88.54,"Reranking Average (4 datasets)":60.64,"Retrieval Average (15 datasets)":59.0,"STS Average (10 datasets)":85.05,"Summarization Average (1 datasets)":31.16} -{"index":15,"Rank":14,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":67.34,"Classification Average (12 datasets)":79.6,"Clustering Average (11 datasets)":55.83,"PairClassification Average (3 
datasets)":87.38,"Reranking Average (4 datasets)":60.13,"Retrieval Average (15 datasets)":56.24,"STS Average (10 datasets)":82.42,"Summarization Average (1 datasets)":31.46} -{"index":16,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":67.16,"Classification Average (12 datasets)":82.47,"Clustering Average (11 datasets)":48.75,"PairClassification Average (3 datasets)":87.51,"Reranking Average (4 datasets)":59.98,"Retrieval Average (15 datasets)":58.29,"STS Average (10 datasets)":82.73,"Summarization Average (1 datasets)":31.17} -{"index":204,"Rank":16,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":67.16,"Classification Average (12 datasets)":82.47,"Clustering Average (11 datasets)":48.75,"PairClassification Average (3 datasets)":87.51,"Reranking Average (4 datasets)":59.98,"Retrieval Average (15 datasets)":58.29,"STS Average (10 datasets)":82.73,"Summarization Average (1 datasets)":31.17} -{"index":9,"Rank":17,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Embedding Dimensions":1024,"Max Tokens":4000,"Average (56 datasets)":67.13,"Classification Average (12 datasets)":79.25,"Clustering Average (11 datasets)":52.42,"PairClassification Average (3 datasets)":86.87,"Reranking Average (4 datasets)":58.24,"Retrieval Average (15 datasets)":56.6,"STS Average (10 datasets)":85.79,"Summarization Average (1 datasets)":31.01} -{"index":156,"Rank":18,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":66.63,"Classification Average (12 datasets)":78.47,"Clustering Average (11 datasets)":50.26,"PairClassification Average (3 datasets)":88.34,"Reranking Average (4 datasets)":60.21,"Retrieval Average (15 datasets)":56.89,"STS Average (10 datasets)":84.63,"Summarization Average (1 datasets)":31.4} -{"index":1,"Rank":19,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":768,"Max Tokens":2048,"Average (56 datasets)":66.31,"Classification Average (12 datasets)":81.17,"Clustering Average (11 datasets)":47.48,"PairClassification Average (3 datasets)":87.61,"Reranking Average (4 datasets)":58.9,"Retrieval Average (15 datasets)":55.7,"STS Average (10 datasets)":85.07,"Summarization Average (1 datasets)":32.63} -{"index":219,"Rank":20,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":65.96,"Classification Average (12 datasets)":77.17,"Clustering Average (11 datasets)":47.86,"PairClassification Average (3 datasets)":88.27,"Reranking Average (4 datasets)":60.46,"Retrieval Average (15 datasets)":57.05,"STS Average (10 datasets)":84.82,"Summarization Average (1 datasets)":30.83} -{"index":43,"Rank":21,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":65.66,"Classification Average (12 datasets)":78.53,"Clustering Average (11 datasets)":50.14,"PairClassification Average (3 datasets)":84.97,"Reranking Average (4 datasets)":59.8,"Retrieval Average (15 
datasets)":55.09,"STS Average (10 datasets)":83.26,"Summarization Average (1 datasets)":29.82} -{"index":19,"Rank":22,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":65.39,"Classification Average (12 datasets)":77.75,"Clustering Average (11 datasets)":47.96,"PairClassification Average (3 datasets)":84.53,"Reranking Average (4 datasets)":58.5,"Retrieval Average (15 datasets)":57.91,"STS Average (10 datasets)":81.43,"Summarization Average (1 datasets)":30.91} -{"index":62,"Rank":23,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (56 datasets)":65.01,"Classification Average (12 datasets)":75.92,"Clustering Average (11 datasets)":46.45,"PairClassification Average (3 datasets)":87.79,"Reranking Average (4 datasets)":59.68,"Retrieval Average (15 datasets)":56.63,"STS Average (10 datasets)":83.58,"Summarization Average (1 datasets)":30.94} -{"index":64,"Rank":24,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":64.8,"Classification Average (12 datasets)":76.63,"Clustering Average (11 datasets)":45.54,"PairClassification Average (3 datasets)":87.99,"Reranking Average (4 datasets)":58.42,"Retrieval Average (15 datasets)":55.99,"STS Average (10 datasets)":84.09,"Summarization Average (1 datasets)":29.96} -{"index":178,"Rank":25,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.68,"Classification Average (12 datasets)":77.43,"Clustering Average (11 datasets)":46.32,"PairClassification Average (3 datasets)":87.34,"Reranking Average (4 datasets)":58.14,"Retrieval Average (15 datasets)":55.52,"STS Average (10 datasets)":82.56,"Summarization Average (1 datasets)":30.73} -{"index":194,"Rank":26,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.68,"Classification Average (12 datasets)":75.64,"Clustering Average (11 datasets)":46.71,"PairClassification Average (3 datasets)":87.2,"Reranking Average (4 datasets)":60.11,"Retrieval Average (15 datasets)":54.39,"STS Average (10 datasets)":85.0,"Summarization Average (1 datasets)":32.71} -{"index":108,"Rank":27,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.64,"Classification Average (12 datasets)":75.58,"Clustering Average (11 datasets)":46.73,"PairClassification Average (3 datasets)":87.25,"Reranking Average (4 datasets)":59.88,"Retrieval Average (15 datasets)":54.66,"STS Average (10 datasets)":84.54,"Summarization Average (1 datasets)":32.03} -{"index":283,"Rank":28,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":3072,"Max Tokens":8191,"Average (56 datasets)":64.59,"Classification Average (12 datasets)":75.45,"Clustering Average (11 datasets)":49.01,"PairClassification Average (3 datasets)":85.72,"Reranking Average (4 datasets)":59.16,"Retrieval Average (15 datasets)":55.44,"STS Average (10 datasets)":81.73,"Summarization Average (1 
datasets)":29.92} -{"index":8,"Rank":29,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":4000,"Average (56 datasets)":64.49,"Classification Average (12 datasets)":74.79,"Clustering Average (11 datasets)":47.4,"PairClassification Average (3 datasets)":86.57,"Reranking Average (4 datasets)":59.74,"Retrieval Average (15 datasets)":55.58,"STS Average (10 datasets)":82.93,"Summarization Average (1 datasets)":30.97} -{"index":34,"Rank":30,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":64.47,"Classification Average (12 datasets)":76.49,"Clustering Average (11 datasets)":47.43,"PairClassification Average (3 datasets)":85.84,"Reranking Average (4 datasets)":58.01,"Retrieval Average (15 datasets)":55.0,"STS Average (10 datasets)":82.62,"Summarization Average (1 datasets)":30.18} -{"index":161,"Rank":31,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.41,"Classification Average (12 datasets)":77.56,"Clustering Average (11 datasets)":47.1,"PairClassification Average (3 datasets)":86.19,"Reranking Average (4 datasets)":58.58,"Retrieval Average (15 datasets)":52.47,"STS Average (10 datasets)":84.78,"Summarization Average (1 datasets)":30.39} -{"index":0,"Rank":32,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":256,"Max Tokens":2048,"Average (56 datasets)":64.37,"Classification Average (12 datasets)":79.0,"Clustering Average (11 datasets)":45.07,"PairClassification Average (3 datasets)":87.25,"Reranking Average (4 datasets)":57.78,"Retrieval Average (15 datasets)":52.44,"STS Average (10 datasets)":84.93,"Summarization Average (1 datasets)":32.36} -{"index":117,"Rank":33,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.34,"Classification Average (12 datasets)":76.01,"Clustering Average (11 datasets)":46.55,"PairClassification Average (3 datasets)":86.7,"Reranking Average (4 datasets)":60.05,"Retrieval Average (15 datasets)":53.44,"STS Average (10 datasets)":84.59,"Summarization Average (1 datasets)":30.96} -{"index":22,"Rank":34,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":64.23,"Classification Average (12 datasets)":75.97,"Clustering Average (11 datasets)":46.08,"PairClassification Average (3 datasets)":87.12,"Reranking Average (4 datasets)":60.03,"Retrieval Average (15 datasets)":54.29,"STS Average (10 datasets)":83.11,"Summarization Average (1 datasets)":31.61} -{"index":261,"Rank":35,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.21,"Classification Average (12 datasets)":75.16,"Clustering Average (11 datasets)":46.46,"PairClassification Average (3 datasets)":87.07,"Reranking Average (4 datasets)":60.0,"Retrieval Average (15 datasets)":53.3,"STS Average (10 datasets)":85.04,"Summarization Average (1 datasets)":31.93} -{"index":197,"Rank":36,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.21,"Classification Average (12 datasets)":75.16,"Clustering Average (11 datasets)":46.46,"PairClassification Average (3 datasets)":87.07,"Reranking Average (4 datasets)":60.0,"Retrieval Average (15 datasets)":53.3,"STS Average (10 datasets)":85.04,"Summarization Average (1 datasets)":31.93} -{"index":53,"Rank":37,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.2,"Classification Average (12 datasets)":74.5,"Clustering Average (11 datasets)":46.93,"PairClassification Average (3 datasets)":87.14,"Reranking Average (4 datasets)":59.97,"Retrieval Average (15 datasets)":53.46,"STS Average (10 datasets)":84.99,"Summarization Average (1 datasets)":32.12} -{"index":60,"Rank":38,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (56 datasets)":64.14,"Classification Average (12 datasets)":76.33,"Clustering Average (11 datasets)":45.24,"PairClassification Average (3 datasets)":88.03,"Reranking Average (4 datasets)":57.38,"Retrieval Average (15 datasets)":54.6,"STS Average (10 datasets)":83.73,"Summarization Average (1 datasets)":28.49} -{"index":18,"Rank":39,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":64.11,"Classification Average (12 datasets)":77.17,"Clustering Average (11 datasets)":46.82,"PairClassification Average (3 datasets)":85.33,"Reranking Average (4 datasets)":57.66,"Retrieval Average (15 datasets)":54.09,"STS Average (10 datasets)":81.97,"Summarization Average (1 datasets)":31.17} -{"index":36,"Rank":40,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":64.01,"Classification Average (12 datasets)":76.01,"Clustering Average (11 datasets)":46.6,"PairClassification Average (3 datasets)":86.15,"Reranking Average (4 datasets)":57.86,"Retrieval Average (15 datasets)":53.84,"STS Average (10 datasets)":83.15,"Summarization Average (1 datasets)":30.99} -{"index":115,"Rank":41,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.71,"Classification Average (12 datasets)":76.03,"Clustering Average (11 datasets)":46.21,"PairClassification Average (3 datasets)":86.32,"Reranking Average (4 datasets)":59.37,"Retrieval Average (15 datasets)":52.31,"STS Average (10 datasets)":83.51,"Summarization Average (1 datasets)":30.87} -{"index":42,"Rank":42,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (56 datasets)":63.62,"Classification Average (12 datasets)":70.21,"Clustering Average (11 datasets)":50.61,"PairClassification Average (3 datasets)":82.95,"Reranking Average (4 datasets)":57.09,"Retrieval Average (15 datasets)":57.36,"STS Average (10 datasets)":79.54,"Summarization Average (1 datasets)":30.26} -{"index":20,"Rank":43,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 
datasets)":63.56,"Classification Average (12 datasets)":75.53,"Clustering Average (11 datasets)":45.81,"PairClassification Average (3 datasets)":86.55,"Reranking Average (4 datasets)":58.86,"Retrieval Average (15 datasets)":53.25,"STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.07} -{"index":180,"Rank":44,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.55,"Classification Average (12 datasets)":75.53,"Clustering Average (11 datasets)":45.77,"PairClassification Average (3 datasets)":86.55,"Reranking Average (4 datasets)":58.86,"Retrieval Average (15 datasets)":53.25,"STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.07} -{"index":179,"Rank":45,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.55,"Classification Average (12 datasets)":75.53,"Clustering Average (11 datasets)":45.77,"PairClassification Average (3 datasets)":86.55,"Reranking Average (4 datasets)":58.86,"Retrieval Average (15 datasets)":53.25,"STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.07} -{"index":181,"Rank":46,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.55,"Classification Average (12 datasets)":75.53,"Clustering Average (11 datasets)":45.77,"PairClassification Average (3 datasets)":86.55,"Reranking Average (4 datasets)":58.86,"Retrieval Average (15 datasets)":53.25,"STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.07} -{"index":182,"Rank":47,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.55,"Classification Average (12 datasets)":75.53,"Clustering Average (11 datasets)":45.77,"PairClassification Average (3 datasets)":86.55,"Reranking Average (4 datasets)":58.86,"Retrieval Average (15 datasets)":53.25,"STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.07} -{"index":186,"Rank":48,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.54,"Classification Average (12 datasets)":75.99,"Clustering Average (11 datasets)":45.58,"PairClassification Average (3 datasets)":87.37,"Reranking Average (4 datasets)":60.04,"Retrieval Average (15 datasets)":51.92,"STS Average (10 datasets)":83.34,"Summarization Average (1 datasets)":30.82} -{"index":170,"Rank":49,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.34,"Classification Average (12 datasets)":73.96,"Clustering Average (11 datasets)":46.61,"PairClassification Average (3 datasets)":86.85,"Reranking Average (4 datasets)":59.86,"Retrieval Average (15 datasets)":51.8,"STS Average (10 datasets)":83.85,"Summarization Average (1 datasets)":31.61} -{"index":193,"Rank":50,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.25,"Classification Average (12 datasets)":74.14,"Clustering 
Average (11 datasets)":46.07,"PairClassification Average (3 datasets)":85.89,"Reranking Average (4 datasets)":58.94,"Retrieval Average (15 datasets)":51.42,"STS Average (10 datasets)":84.9,"Summarization Average (1 datasets)":31.55} -{"index":253,"Rank":51,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.13,"Classification Average (12 datasets)":73.33,"Clustering Average (11 datasets)":46.84,"PairClassification Average (3 datasets)":85.0,"Reranking Average (4 datasets)":59.13,"Retrieval Average (15 datasets)":52.22,"STS Average (10 datasets)":83.35,"Summarization Average (1 datasets)":31.66} -{"index":119,"Rank":52,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.12,"Classification Average (12 datasets)":75.97,"Clustering Average (11 datasets)":44.95,"PairClassification Average (3 datasets)":84.99,"Reranking Average (4 datasets)":58.3,"Retrieval Average (15 datasets)":51.99,"STS Average (10 datasets)":83.0,"Summarization Average (1 datasets)":30.6} -{"index":125,"Rank":53,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":63.12,"Classification Average (12 datasets)":75.97,"Clustering Average (11 datasets)":44.95,"PairClassification Average (3 datasets)":84.99,"Reranking Average (4 datasets)":58.3,"Retrieval Average (15 datasets)":51.99,"STS Average (10 datasets)":83.0,"Summarization Average (1 datasets)":30.6} -{"index":118,"Rank":54,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.72,"Classification Average (12 datasets)":76.11,"Clustering Average (11 datasets)":44.82,"PairClassification Average (3 datasets)":84.68,"Reranking Average (4 datasets)":58.56,"Retrieval Average (15 datasets)":50.43,"STS Average (10 datasets)":83.03,"Summarization Average (1 datasets)":31.14} -{"index":151,"Rank":55,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.61,"Classification Average (12 datasets)":75.28,"Clustering Average (11 datasets)":44.9,"PairClassification Average (3 datasets)":86.45,"Reranking Average (4 datasets)":58.78,"Retrieval Average (15 datasets)":50.1,"STS Average (10 datasets)":83.02,"Summarization Average (1 datasets)":32.52} -{"index":252,"Rank":56,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.39,"Classification Average (12 datasets)":73.01,"Clustering Average (11 datasets)":46.2,"PairClassification Average (3 datasets)":84.57,"Reranking Average (4 datasets)":58.61,"Retrieval Average (15 datasets)":51.14,"STS Average (10 datasets)":82.3,"Summarization Average (1 datasets)":31.17} -{"index":26,"Rank":57,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.39,"Classification Average (12 datasets)":74.12,"Clustering Average (11 datasets)":43.91,"PairClassification Average (3 datasets)":85.15,"Reranking Average (4 datasets)":55.69,"Retrieval Average (15 
datasets)":52.81,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.08} -{"index":28,"Rank":58,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.39,"Classification Average (12 datasets)":74.12,"Clustering Average (11 datasets)":43.91,"PairClassification Average (3 datasets)":85.15,"Reranking Average (4 datasets)":55.69,"Retrieval Average (15 datasets)":52.81,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.08} -{"index":29,"Rank":59,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.39,"Classification Average (12 datasets)":74.12,"Clustering Average (11 datasets)":43.91,"PairClassification Average (3 datasets)":85.15,"Reranking Average (4 datasets)":55.69,"Retrieval Average (15 datasets)":52.81,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.08} -{"index":27,"Rank":60,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.39,"Classification Average (12 datasets)":74.12,"Clustering Average (11 datasets)":43.91,"PairClassification Average (3 datasets)":85.15,"Reranking Average (4 datasets)":55.69,"Retrieval Average (15 datasets)":52.81,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.08} -{"index":129,"Rank":61,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Embedding Dimensions":768,"Max Tokens":8192,"Average (56 datasets)":62.39,"Classification Average (12 datasets)":74.12,"Clustering Average (11 datasets)":43.91,"PairClassification Average (3 datasets)":85.15,"Reranking Average (4 datasets)":55.69,"Retrieval Average (15 datasets)":52.81,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.08} -{"index":206,"Rank":62,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Embedding Dimensions":768,"Max Tokens":8192,"Average (56 datasets)":62.39,"Classification Average (12 datasets)":74.12,"Clustering Average (11 datasets)":43.91,"PairClassification Average (3 datasets)":85.15,"Reranking Average (4 datasets)":55.69,"Retrieval Average (15 datasets)":52.81,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.08} -{"index":213,"Rank":63,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.28,"Classification Average (12 datasets)":73.55,"Clustering Average (11 datasets)":43.93,"PairClassification Average (3 datasets)":84.61,"Reranking Average (4 datasets)":55.78,"Retrieval Average (15 datasets)":53.01,"STS Average (10 datasets)":81.94,"Summarization Average (1 datasets)":30.4} -{"index":284,"Rank":64,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (56 datasets)":62.26,"Classification Average (12 datasets)":73.21,"Clustering Average (11 datasets)":46.65,"PairClassification Average (3 datasets)":85.04,"Reranking Average (4 datasets)":56.72,"Retrieval Average (15 datasets)":51.08,"STS Average (10 datasets)":81.58,"Summarization Average (1 datasets)":31.12} 
-{"index":155,"Rank":65,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":62.25,"Classification Average (12 datasets)":75.24,"Clustering Average (11 datasets)":44.49,"PairClassification Average (3 datasets)":86.03,"Reranking Average (4 datasets)":56.61,"Retrieval Average (15 datasets)":50.56,"STS Average (10 datasets)":82.05,"Summarization Average (1 datasets)":30.19} -{"index":24,"Rank":66,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":62.17,"Classification Average (12 datasets)":74.14,"Clustering Average (11 datasets)":43.82,"PairClassification Average (3 datasets)":84.92,"Reranking Average (4 datasets)":58.36,"Retrieval Average (15 datasets)":51.68,"STS Average (10 datasets)":81.59,"Summarization Average (1 datasets)":30.12} -{"index":33,"Rank":67,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":62.01,"Classification Average (12 datasets)":74.31,"Clustering Average (11 datasets)":44.64,"PairClassification Average (3 datasets)":85.05,"Reranking Average (4 datasets)":56.09,"Retrieval Average (15 datasets)":51.34,"STS Average (10 datasets)":80.92,"Summarization Average (1 datasets)":31.29} -{"index":282,"Rank":68,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":256,"Max Tokens":8191,"Average (56 datasets)":62.0,"Classification Average (12 datasets)":71.97,"Clustering Average (11 datasets)":46.23,"PairClassification Average (3 datasets)":84.22,"Reranking Average (4 datasets)":57.99,"Retrieval Average (15 datasets)":51.66,"STS Average (10 datasets)":81.04,"Summarization Average (1 datasets)":29.92} -{"index":211,"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":512,"Max Tokens":8192,"Average (56 datasets)":61.96,"Classification Average (12 datasets)":73.24,"Clustering Average (11 datasets)":43.71,"PairClassification Average (3 datasets)":84.59,"Reranking Average (4 datasets)":55.65,"Retrieval Average (15 datasets)":52.4,"STS Average (10 datasets)":81.7,"Summarization Average (1 datasets)":30.47} -{"index":66,"Rank":70,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (56 datasets)":61.85,"Classification Average (12 datasets)":72.21,"Clustering Average (11 datasets)":43.57,"PairClassification Average (3 datasets)":86.21,"Reranking Average (4 datasets)":55.38,"Retrieval Average (15 datasets)":51.44,"STS Average (10 datasets)":83.58,"Summarization Average (1 datasets)":30.01} -{"index":149,"Rank":71,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":61.79,"Classification Average (12 datasets)":73.12,"Clustering Average (11 datasets)":44.74,"PairClassification Average (3 datasets)":86.62,"Reranking Average (4 datasets)":57.29,"Retrieval Average (15 datasets)":49.26,"STS Average (10 datasets)":83.06,"Summarization Average (1 datasets)":32.32} -{"index":148,"Rank":72,"Model":"instructor-large<\/a>","Model Size (Million 
Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":61.59,"Classification Average (12 datasets)":73.86,"Clustering Average (11 datasets)":45.29,"PairClassification Average (3 datasets)":85.89,"Reranking Average (4 datasets)":57.54,"Retrieval Average (15 datasets)":47.57,"STS Average (10 datasets)":83.15,"Summarization Average (1 datasets)":31.84} -{"index":140,"Rank":73,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":61.5,"Classification Average (12 datasets)":73.84,"Clustering Average (11 datasets)":43.8,"PairClassification Average (3 datasets)":85.73,"Reranking Average (4 datasets)":55.91,"Retrieval Average (15 datasets)":50.29,"STS Average (10 datasets)":81.05,"Summarization Average (1 datasets)":30.28} -{"index":153,"Rank":74,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":61.5,"Classification Average (12 datasets)":73.84,"Clustering Average (11 datasets)":43.8,"PairClassification Average (3 datasets)":85.73,"Reranking Average (4 datasets)":55.91,"Retrieval Average (15 datasets)":50.29,"STS Average (10 datasets)":81.05,"Summarization Average (1 datasets)":30.28} -{"index":160,"Rank":75,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (56 datasets)":61.5,"Classification Average (12 datasets)":74.81,"Clustering Average (11 datasets)":41.06,"PairClassification Average (3 datasets)":84.75,"Reranking Average (4 datasets)":55.86,"Retrieval Average (15 datasets)":51.43,"STS Average (10 datasets)":81.56,"Summarization Average (1 datasets)":29.69} -{"index":154,"Rank":76,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":61.42,"Classification Average (12 datasets)":73.14,"Clustering Average (11 datasets)":43.33,"PairClassification Average (3 datasets)":85.94,"Reranking Average (4 datasets)":56.53,"Retrieval Average (15 datasets)":49.99,"STS Average (10 datasets)":82.06,"Summarization Average (1 datasets)":30.97} -{"index":207,"Rank":77,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":61.36,"Classification Average (12 datasets)":73.65,"Clustering Average (11 datasets)":43.7,"PairClassification Average (3 datasets)":84.59,"Reranking Average (4 datasets)":53.32,"Retrieval Average (15 datasets)":51.43,"STS Average (10 datasets)":80.22,"Summarization Average (1 datasets)":31.28} -{"index":254,"Rank":78,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":61.36,"Classification Average (12 datasets)":72.31,"Clustering Average (11 datasets)":44.89,"PairClassification Average (3 datasets)":83.54,"Reranking Average (4 datasets)":57.7,"Retrieval Average (15 datasets)":49.46,"STS Average (10 datasets)":82.07,"Summarization Average (1 datasets)":30.42} -{"index":210,"Rank":79,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":256,"Max Tokens":8192,"Average (56 datasets)":61.04,"Classification 
Average (12 datasets)":72.1,"Clustering Average (11 datasets)":43.16,"PairClassification Average (3 datasets)":84.09,"Reranking Average (4 datasets)":55.18,"Retrieval Average (15 datasets)":50.81,"STS Average (10 datasets)":81.34,"Summarization Average (1 datasets)":30.05} -{"index":281,"Rank":80,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (56 datasets)":60.99,"Classification Average (12 datasets)":70.93,"Clustering Average (11 datasets)":45.9,"PairClassification Average (3 datasets)":84.89,"Reranking Average (4 datasets)":56.32,"Retrieval Average (15 datasets)":49.25,"STS Average (10 datasets)":80.97,"Summarization Average (1 datasets)":30.8} -{"index":169,"Rank":81,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":60.63,"Classification Average (12 datasets)":72.13,"Clustering Average (11 datasets)":40.81,"PairClassification Average (3 datasets)":85.4,"Reranking Average (4 datasets)":55.91,"Retrieval Average (15 datasets)":49.34,"STS Average (10 datasets)":83.01,"Summarization Average (1 datasets)":30.97} -{"index":152,"Rank":82,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":60.44,"Classification Average (12 datasets)":72.63,"Clustering Average (11 datasets)":42.11,"PairClassification Average (3 datasets)":85.09,"Reranking Average (4 datasets)":55.7,"Retrieval Average (15 datasets)":48.75,"STS Average (10 datasets)":80.96,"Summarization Average (1 datasets)":31.01} -{"index":175,"Rank":83,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Embedding Dimensions":768,"Max Tokens":8192,"Average (56 datasets)":60.38,"Classification Average (12 datasets)":73.45,"Clustering Average (11 datasets)":41.73,"PairClassification Average (3 datasets)":85.38,"Reranking Average (4 datasets)":56.98,"Retrieval Average (15 datasets)":47.87,"STS Average (10 datasets)":80.7,"Summarization Average (1 datasets)":31.6} -{"index":112,"Rank":84,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":60.37,"Classification Average (12 datasets)":70.62,"Clustering Average (11 datasets)":40.4,"PairClassification Average (3 datasets)":82.14,"Reranking Average (4 datasets)":57.5,"Retrieval Average (15 datasets)":51.31,"STS Average (10 datasets)":81.54,"Summarization Average (1 datasets)":27.54} -{"index":101,"Rank":85,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":60.09,"Classification Average (12 datasets)":68.01,"Clustering Average (11 datasets)":41.51,"PairClassification Average (3 datasets)":82.02,"Reranking Average (4 datasets)":55.47,"Retrieval Average (15 datasets)":54.83,"STS Average (10 datasets)":77.08,"Summarization Average (1 datasets)":31.38} -{"index":35,"Rank":86,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":60.08,"Classification Average (12 datasets)":70.57,"Clustering Average (11 datasets)":41.98,"PairClassification Average (3 
datasets)":83.95,"Reranking Average (4 datasets)":55.06,"Retrieval Average (15 datasets)":50.15,"STS Average (10 datasets)":80.09,"Summarization Average (1 datasets)":30.41} -{"index":158,"Rank":87,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.93,"Classification Average (12 datasets)":72.94,"Clustering Average (11 datasets)":39.92,"PairClassification Average (3 datasets)":84.67,"Reranking Average (4 datasets)":54.32,"Retrieval Average (15 datasets)":49.04,"STS Average (10 datasets)":80.39,"Summarization Average (1 datasets)":31.16} -{"index":167,"Rank":88,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.86,"Classification Average (12 datasets)":71.91,"Clustering Average (11 datasets)":40.74,"PairClassification Average (3 datasets)":84.06,"Reranking Average (4 datasets)":54.9,"Retrieval Average (15 datasets)":47.67,"STS Average (10 datasets)":82.37,"Summarization Average (1 datasets)":30.62} -{"index":208,"Rank":89,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.85,"Classification Average (12 datasets)":71.17,"Clustering Average (11 datasets)":42.52,"PairClassification Average (3 datasets)":83.67,"Reranking Average (4 datasets)":54.99,"Retrieval Average (15 datasets)":48.0,"STS Average (10 datasets)":80.84,"Summarization Average (1 datasets)":30.72} -{"index":121,"Rank":90,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.84,"Classification Average (12 datasets)":67.07,"Clustering Average (11 datasets)":41.49,"PairClassification Average (3 datasets)":80.8,"Reranking Average (4 datasets)":55.46,"Retrieval Average (15 datasets)":55.98,"STS Average (10 datasets)":75.48,"Summarization Average (1 datasets)":30.84} -{"index":99,"Rank":91,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.84,"Classification Average (12 datasets)":67.07,"Clustering Average (11 datasets)":41.49,"PairClassification Average (3 datasets)":80.8,"Reranking Average (4 datasets)":55.46,"Retrieval Average (15 datasets)":55.98,"STS Average (10 datasets)":75.48,"Summarization Average (1 datasets)":30.84} -{"index":100,"Rank":92,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.79,"Classification Average (12 datasets)":68.23,"Clustering Average (11 datasets)":41.87,"PairClassification Average (3 datasets)":81.07,"Reranking Average (4 datasets)":55.42,"Retrieval Average (15 datasets)":54.91,"STS Average (10 datasets)":75.03,"Summarization Average (1 datasets)":30.31} -{"index":147,"Rank":93,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":"N\/A","Average (56 datasets)":59.54,"Classification Average (12 datasets)":72.36,"Clustering Average (11 datasets)":41.9,"PairClassification Average (3 datasets)":83.51,"Reranking Average (4 datasets)":56.2,"Retrieval Average (15 datasets)":45.12,"STS Average (10 
datasets)":82.29,"Summarization Average (1 datasets)":29.85} -{"index":246,"Rank":94,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":59.51,"Classification Average (12 datasets)":73.42,"Clustering Average (11 datasets)":43.72,"PairClassification Average (3 datasets)":85.06,"Reranking Average (4 datasets)":56.42,"Retrieval Average (15 datasets)":42.24,"STS Average (10 datasets)":82.63,"Summarization Average (1 datasets)":30.08} -{"index":159,"Rank":95,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":59.45,"Classification Average (12 datasets)":73.02,"Clustering Average (11 datasets)":37.89,"PairClassification Average (3 datasets)":83.57,"Reranking Average (4 datasets)":54.84,"Retrieval Average (15 datasets)":48.88,"STS Average (10 datasets)":80.26,"Summarization Average (1 datasets)":30.11} -{"index":209,"Rank":96,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":128,"Max Tokens":8192,"Average (56 datasets)":59.34,"Classification Average (12 datasets)":69.7,"Clustering Average (11 datasets)":42.24,"PairClassification Average (3 datasets)":83.54,"Reranking Average (4 datasets)":54.56,"Retrieval Average (15 datasets)":47.75,"STS Average (10 datasets)":80.74,"Summarization Average (1 datasets)":29.59} -{"index":185,"Rank":97,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.29,"Classification Average (12 datasets)":72.25,"Clustering Average (11 datasets)":43.48,"PairClassification Average (3 datasets)":79.23,"Reranking Average (4 datasets)":57.12,"Retrieval Average (15 datasets)":44.99,"STS Average (10 datasets)":80.47,"Summarization Average (1 datasets)":29.02} -{"index":116,"Rank":98,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":59.0,"Classification Average (12 datasets)":72.72,"Clustering Average (11 datasets)":39.48,"PairClassification Average (3 datasets)":83.39,"Reranking Average (4 datasets)":57.95,"Retrieval Average (15 datasets)":45.12,"STS Average (10 datasets)":80.72,"Summarization Average (1 datasets)":31.22} -{"index":238,"Rank":99,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":58.97,"Classification Average (12 datasets)":67.41,"Clustering Average (11 datasets)":42.42,"PairClassification Average (3 datasets)":86.12,"Reranking Average (4 datasets)":56.66,"Retrieval Average (15 datasets)":48.48,"STS Average (10 datasets)":78.38,"Summarization Average (1 datasets)":30.64} -{"index":103,"Rank":100,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":58.94,"Classification Average (12 datasets)":65.65,"Clustering Average (11 datasets)":40.88,"PairClassification Average (3 datasets)":82.07,"Reranking Average (4 datasets)":55.31,"Retrieval Average (15 datasets)":51.98,"STS Average (10 datasets)":78.44,"Summarization Average (1 datasets)":31.75} 
-{"index":83,"Rank":101,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":58.93,"Classification Average (12 datasets)":68.13,"Clustering Average (11 datasets)":40.34,"PairClassification Average (3 datasets)":82.0,"Reranking Average (4 datasets)":56.56,"Retrieval Average (15 datasets)":50.25,"STS Average (10 datasets)":78.1,"Summarization Average (1 datasets)":31.46} -{"index":157,"Rank":102,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":58.89,"Classification Average (12 datasets)":71.67,"Clustering Average (11 datasets)":39.51,"PairClassification Average (3 datasets)":85.08,"Reranking Average (4 datasets)":54.45,"Retrieval Average (15 datasets)":46.01,"STS Average (10 datasets)":80.87,"Summarization Average (1 datasets)":31.39} -{"index":107,"Rank":103,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":58.69,"Classification Average (12 datasets)":70.35,"Clustering Average (11 datasets)":42.09,"PairClassification Average (3 datasets)":82.83,"Reranking Average (4 datasets)":55.77,"Retrieval Average (15 datasets)":44.92,"STS Average (10 datasets)":80.46,"Summarization Average (1 datasets)":29.47} -{"index":237,"Rank":104,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":58.42,"Classification Average (12 datasets)":67.11,"Clustering Average (11 datasets)":41.51,"PairClassification Average (3 datasets)":86.13,"Reranking Average (4 datasets)":55.96,"Retrieval Average (15 datasets)":47.96,"STS Average (10 datasets)":77.8,"Summarization Average (1 datasets)":30.21} -{"index":166,"Rank":105,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (56 datasets)":58.29,"Classification Average (12 datasets)":70.17,"Clustering Average (11 datasets)":39.11,"PairClassification Average (3 datasets)":83.11,"Reranking Average (4 datasets)":54.28,"Retrieval Average (15 datasets)":45.27,"STS Average (10 datasets)":81.52,"Summarization Average (1 datasets)":31.1} -{"index":236,"Rank":106,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":58.28,"Classification Average (12 datasets)":67.14,"Clustering Average (11 datasets)":41.6,"PairClassification Average (3 datasets)":85.32,"Reranking Average (4 datasets)":55.36,"Retrieval Average (15 datasets)":47.42,"STS Average (10 datasets)":78.19,"Summarization Average (1 datasets)":29.5} -{"index":69,"Rank":107,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":58.07,"Classification Average (12 datasets)":70.25,"Clustering Average (11 datasets)":40.7,"PairClassification Average (3 datasets)":83.32,"Reranking Average (4 datasets)":55.34,"Retrieval Average (15 datasets)":43.97,"STS Average (10 datasets)":79.89,"Summarization Average (1 datasets)":31.27} -{"index":177,"Rank":108,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding 
Dimensions":"","Max Tokens":"","Average (56 datasets)":58.0,"Classification Average (12 datasets)":68.82,"Clustering Average (11 datasets)":40.08,"PairClassification Average (3 datasets)":84.44,"Reranking Average (4 datasets)":55.09,"Retrieval Average (15 datasets)":45.14,"STS Average (10 datasets)":80.0,"Summarization Average (1 datasets)":30.56} -{"index":184,"Rank":109,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":57.97,"Classification Average (12 datasets)":70.55,"Clustering Average (11 datasets)":42.97,"PairClassification Average (3 datasets)":77.83,"Reranking Average (4 datasets)":55.6,"Retrieval Average (15 datasets)":43.39,"STS Average (10 datasets)":79.02,"Summarization Average (1 datasets)":30.25} -{"index":162,"Rank":110,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":57.87,"Classification Average (12 datasets)":70.74,"Clustering Average (11 datasets)":37.08,"PairClassification Average (3 datasets)":82.59,"Reranking Average (4 datasets)":53.87,"Retrieval Average (15 datasets)":46.64,"STS Average (10 datasets)":79.1,"Summarization Average (1 datasets)":29.98} -{"index":245,"Rank":111,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":57.87,"Classification Average (12 datasets)":72.84,"Clustering Average (11 datasets)":42.34,"PairClassification Average (3 datasets)":86.06,"Reranking Average (4 datasets)":54.71,"Retrieval Average (15 datasets)":38.47,"STS Average (10 datasets)":81.66,"Summarization Average (1 datasets)":29.91} -{"index":230,"Rank":112,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":57.78,"Classification Average (12 datasets)":65.03,"Clustering Average (11 datasets)":43.69,"PairClassification Average (3 datasets)":83.04,"Reranking Average (4 datasets)":59.36,"Retrieval Average (15 datasets)":43.81,"STS Average (10 datasets)":80.32,"Summarization Average (1 datasets)":27.49} -{"index":123,"Rank":113,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":57.59,"Classification Average (12 datasets)":66.19,"Clustering Average (11 datasets)":38.93,"PairClassification Average (3 datasets)":81.9,"Reranking Average (4 datasets)":55.65,"Retrieval Average (15 datasets)":48.22,"STS Average (10 datasets)":77.74,"Summarization Average (1 datasets)":33.6} -{"index":172,"Rank":114,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":57.38,"Classification Average (12 datasets)":67.76,"Clustering Average (11 datasets)":37.15,"PairClassification Average (3 datasets)":84.8,"Reranking Average (4 datasets)":56.42,"Retrieval Average (15 datasets)":44.81,"STS Average (10 datasets)":80.96,"Summarization Average (1 datasets)":29.85} -{"index":82,"Rank":115,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":57.17,"Classification Average (12 
datasets)":67.13,"Clustering Average (11 datasets)":39.83,"PairClassification Average (3 datasets)":80.65,"Reranking Average (4 datasets)":54.67,"Retrieval Average (15 datasets)":46.54,"STS Average (10 datasets)":76.83,"Summarization Average (1 datasets)":31.03} -{"index":244,"Rank":116,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":57.06,"Classification Average (12 datasets)":72.31,"Clustering Average (11 datasets)":41.65,"PairClassification Average (3 datasets)":84.97,"Reranking Average (4 datasets)":54.0,"Retrieval Average (15 datasets)":36.71,"STS Average (10 datasets)":81.83,"Summarization Average (1 datasets)":29.64} -{"index":104,"Rank":117,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":56.98,"Classification Average (12 datasets)":62.94,"Clustering Average (11 datasets)":38.86,"PairClassification Average (3 datasets)":81.33,"Reranking Average (4 datasets)":54.4,"Retrieval Average (15 datasets)":50.15,"STS Average (10 datasets)":76.39,"Summarization Average (1 datasets)":30.1} -{"index":183,"Rank":118,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":56.81,"Classification Average (12 datasets)":69.65,"Clustering Average (11 datasets)":40.86,"PairClassification Average (3 datasets)":76.9,"Reranking Average (4 datasets)":55.5,"Retrieval Average (15 datasets)":41.41,"STS Average (10 datasets)":79.11,"Summarization Average (1 datasets)":31.01} -{"index":65,"Rank":119,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":56.8,"Classification Average (12 datasets)":74.07,"Clustering Average (11 datasets)":40.63,"PairClassification Average (3 datasets)":80.94,"Reranking Average (4 datasets)":53.98,"Retrieval Average (15 datasets)":38.05,"STS Average (10 datasets)":78.5,"Summarization Average (1 datasets)":30.19} -{"index":106,"Rank":120,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":56.57,"Classification Average (12 datasets)":68.04,"Clustering Average (11 datasets)":39.18,"PairClassification Average (3 datasets)":82.81,"Reranking Average (4 datasets)":54.29,"Retrieval Average (15 datasets)":42.56,"STS Average (10 datasets)":78.65,"Summarization Average (1 datasets)":29.87} -{"index":228,"Rank":121,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":56.46,"Classification Average (12 datasets)":62.87,"Clustering Average (11 datasets)":41.81,"PairClassification Average (3 datasets)":82.41,"Reranking Average (4 datasets)":58.44,"Retrieval Average (15 datasets)":42.69,"STS Average (10 datasets)":79.84,"Summarization Average (1 datasets)":27.9} -{"index":285,"Rank":122,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":30522,"Max Tokens":512,"Average (56 datasets)":56.33,"Classification Average (12 datasets)":65.94,"Clustering Average (11 datasets)":32.28,"PairClassification Average (3 
datasets)":81.32,"Reranking Average (4 datasets)":53.27,"Retrieval Average (15 datasets)":50.02,"STS Average (10 datasets)":76.95,"Summarization Average (1 datasets)":31.03} -{"index":229,"Rank":123,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":56.26,"Classification Average (12 datasets)":63.05,"Clustering Average (11 datasets)":42.35,"PairClassification Average (3 datasets)":82.37,"Reranking Average (4 datasets)":58.04,"Retrieval Average (15 datasets)":41.95,"STS Average (10 datasets)":78.9,"Summarization Average (1 datasets)":30.81} -{"index":171,"Rank":124,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":56.26,"Classification Average (12 datasets)":66.07,"Clustering Average (11 datasets)":35.88,"PairClassification Average (3 datasets)":83.04,"Reranking Average (4 datasets)":55.84,"Retrieval Average (15 datasets)":44.03,"STS Average (10 datasets)":79.93,"Summarization Average (1 datasets)":30.71} -{"index":63,"Rank":125,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (56 datasets)":56.23,"Classification Average (12 datasets)":71.88,"Clustering Average (11 datasets)":41.99,"PairClassification Average (3 datasets)":78.01,"Reranking Average (4 datasets)":53.09,"Retrieval Average (15 datasets)":39.19,"STS Average (10 datasets)":75.86,"Summarization Average (1 datasets)":31.45} -{"index":79,"Rank":126,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":56.2,"Classification Average (12 datasets)":66.52,"Clustering Average (11 datasets)":39.92,"PairClassification Average (3 datasets)":79.58,"Reranking Average (4 datasets)":54.0,"Retrieval Average (15 datasets)":44.49,"STS Average (10 datasets)":75.74,"Summarization Average (1 datasets)":30.43} -{"index":235,"Rank":127,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":56.19,"Classification Average (12 datasets)":65.25,"Clustering Average (11 datasets)":38.63,"PairClassification Average (3 datasets)":83.85,"Reranking Average (4 datasets)":54.23,"Retrieval Average (15 datasets)":44.67,"STS Average (10 datasets)":77.07,"Summarization Average (1 datasets)":29.67} -{"index":212,"Rank":128,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":64,"Max Tokens":8192,"Average (56 datasets)":56.1,"Classification Average (12 datasets)":66.48,"Clustering Average (11 datasets)":40.31,"PairClassification Average (3 datasets)":82.26,"Reranking Average (4 datasets)":53.42,"Retrieval Average (15 datasets)":40.92,"STS Average (10 datasets)":79.79,"Summarization Average (1 datasets)":28.41} -{"index":214,"Rank":129,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":56.0,"Classification Average (12 datasets)":66.68,"Clustering Average (11 datasets)":41.1,"PairClassification Average (3 datasets)":82.54,"Reranking Average (4 datasets)":53.14,"Retrieval 
Average (15 datasets)":41.88,"STS Average (10 datasets)":76.51,"Summarization Average (1 datasets)":30.36} -{"index":168,"Rank":130,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (56 datasets)":55.81,"Classification Average (12 datasets)":68.04,"Clustering Average (11 datasets)":36.89,"PairClassification Average (3 datasets)":81.05,"Reranking Average (4 datasets)":52.6,"Retrieval Average (15 datasets)":41.19,"STS Average (10 datasets)":79.93,"Summarization Average (1 datasets)":32.06} -{"index":105,"Rank":131,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":55.71,"Classification Average (12 datasets)":66.35,"Clustering Average (11 datasets)":39.46,"PairClassification Average (3 datasets)":81.77,"Reranking Average (4 datasets)":54.28,"Retrieval Average (15 datasets)":40.82,"STS Average (10 datasets)":78.37,"Summarization Average (1 datasets)":31.16} -{"index":61,"Rank":132,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (56 datasets)":55.36,"Classification Average (12 datasets)":71.57,"Clustering Average (11 datasets)":40.83,"PairClassification Average (3 datasets)":77.88,"Reranking Average (4 datasets)":52.95,"Retrieval Average (15 datasets)":36.75,"STS Average (10 datasets)":76.4,"Summarization Average (1 datasets)":31.38} -{"index":243,"Rank":133,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":55.27,"Classification Average (12 datasets)":69.81,"Clustering Average (11 datasets)":40.21,"PairClassification Average (3 datasets)":85.18,"Reranking Average (4 datasets)":53.09,"Retrieval Average (15 datasets)":33.63,"STS Average (10 datasets)":81.14,"Summarization Average (1 datasets)":31.39} -{"index":68,"Rank":134,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":55.04,"Classification Average (12 datasets)":67.9,"Clustering Average (11 datasets)":37.6,"PairClassification Average (3 datasets)":82.69,"Reranking Average (4 datasets)":53.49,"Retrieval Average (15 datasets)":37.94,"STS Average (10 datasets)":79.18,"Summarization Average (1 datasets)":30.97} -{"index":113,"Rank":135,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":54.42,"Classification Average (12 datasets)":67.8,"Clustering Average (11 datasets)":38.03,"PairClassification Average (3 datasets)":81.4,"Reranking Average (4 datasets)":53.64,"Retrieval Average (15 datasets)":36.08,"STS Average (10 datasets)":78.59,"Summarization Average (1 datasets)":29.83} -{"index":44,"Rank":136,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":54.24,"Classification Average (12 datasets)":67.3,"Clustering Average (11 datasets)":40.9,"PairClassification Average (3 datasets)":80.4,"Reranking Average (4 datasets)":53.95,"Retrieval Average (15 datasets)":36.99,"STS Average (10 datasets)":73.7,"Summarization Average (1 datasets)":31.23} 
-{"index":84,"Rank":137,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":53.74,"Classification Average (12 datasets)":70.14,"Clustering Average (11 datasets)":36.98,"PairClassification Average (3 datasets)":77.03,"Reranking Average (4 datasets)":52.33,"Retrieval Average (15 datasets)":32.34,"STS Average (10 datasets)":80.53,"Summarization Average (1 datasets)":30.38} -{"index":134,"Rank":138,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":53.29,"Classification Average (12 datasets)":61.67,"Clustering Average (11 datasets)":35.67,"PairClassification Average (3 datasets)":80.86,"Reranking Average (4 datasets)":54.58,"Retrieval Average (15 datasets)":41.17,"STS Average (10 datasets)":74.23,"Summarization Average (1 datasets)":31.05} -{"index":239,"Rank":139,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":52.35,"Classification Average (12 datasets)":64.71,"Clustering Average (11 datasets)":37.64,"PairClassification Average (3 datasets)":81.74,"Reranking Average (4 datasets)":51.84,"Retrieval Average (15 datasets)":32.96,"STS Average (10 datasets)":76.47,"Summarization Average (1 datasets)":29.5} -{"index":173,"Rank":140,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":52.33,"Classification Average (12 datasets)":60.56,"Clustering Average (11 datasets)":32.56,"PairClassification Average (3 datasets)":79.22,"Reranking Average (4 datasets)":53.07,"Retrieval Average (15 datasets)":38.91,"STS Average (10 datasets)":78.06,"Summarization Average (1 datasets)":31.25} -{"index":80,"Rank":141,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":51.25,"Classification Average (12 datasets)":60.72,"Clustering Average (11 datasets)":35.79,"PairClassification Average (3 datasets)":75.23,"Reranking Average (4 datasets)":50.58,"Retrieval Average (15 datasets)":37.04,"STS Average (10 datasets)":73.41,"Summarization Average (1 datasets)":29.71} -{"index":72,"Rank":142,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":50.94,"Classification Average (12 datasets)":65.72,"Clustering Average (11 datasets)":32.91,"PairClassification Average (3 datasets)":80.62,"Reranking Average (4 datasets)":50.29,"Retrieval Average (15 datasets)":29.33,"STS Average (10 datasets)":78.81,"Summarization Average (1 datasets)":30.71} -{"index":277,"Rank":143,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (56 datasets)":49.52,"Classification Average (12 datasets)":70.44,"Clustering Average (11 datasets)":37.52,"PairClassification Average (3 datasets)":76.86,"Reranking Average (4 datasets)":49.02,"Retrieval Average (15 datasets)":18.36,"STS Average (10 datasets)":78.6,"Summarization Average (1 datasets)":26.94} 
-{"index":67,"Rank":144,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (56 datasets)":49.42,"Classification Average (12 datasets)":67.67,"Clustering Average (11 datasets)":37.45,"PairClassification Average (3 datasets)":72.21,"Reranking Average (4 datasets)":47.7,"Retrieval Average (15 datasets)":25.93,"STS Average (10 datasets)":71.61,"Summarization Average (1 datasets)":31.23} -{"index":217,"Rank":145,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":48.87,"Classification Average (12 datasets)":67.32,"Clustering Average (11 datasets)":33.43,"PairClassification Average (3 datasets)":73.68,"Reranking Average (4 datasets)":47.54,"Retrieval Average (15 datasets)":21.82,"STS Average (10 datasets)":79.12,"Summarization Average (1 datasets)":31.17} -{"index":70,"Rank":146,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":48.61,"Classification Average (12 datasets)":64.86,"Clustering Average (11 datasets)":30.21,"PairClassification Average (3 datasets)":78.57,"Reranking Average (4 datasets)":48.51,"Retrieval Average (15 datasets)":26.17,"STS Average (10 datasets)":75.9,"Summarization Average (1 datasets)":30.3} -{"index":71,"Rank":147,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":48.17,"Classification Average (12 datasets)":67.77,"Clustering Average (11 datasets)":28.01,"PairClassification Average (3 datasets)":80.57,"Reranking Average (4 datasets)":49.41,"Retrieval Average (15 datasets)":25.51,"STS Average (10 datasets)":72.43,"Summarization Average (1 datasets)":29.91} -{"index":81,"Rank":148,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":45.97,"Classification Average (12 datasets)":61.46,"Clustering Average (11 datasets)":30.95,"PairClassification Average (3 datasets)":71.78,"Reranking Average (4 datasets)":47.56,"Retrieval Average (15 datasets)":20.9,"STS Average (10 datasets)":74.71,"Summarization Average (1 datasets)":30.26} -{"index":218,"Rank":149,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":45.45,"Classification Average (12 datasets)":62.5,"Clustering Average (11 datasets)":29.04,"PairClassification Average (3 datasets)":70.33,"Reranking Average (4 datasets)":46.47,"Retrieval Average (15 datasets)":20.29,"STS Average (10 datasets)":74.33,"Summarization Average (1 datasets)":31.15} -{"index":227,"Rank":150,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":45.21,"Classification Average (12 datasets)":62.71,"Clustering Average (11 datasets)":29.55,"PairClassification Average (3 datasets)":78.87,"Reranking Average (4 datasets)":48.42,"Retrieval Average (15 datasets)":18.99,"STS Average (10 datasets)":70.8,"Summarization Average (1 datasets)":31.05} -{"index":233,"Rank":151,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, 
fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (56 datasets)":42.06,"Classification Average (12 datasets)":57.65,"Clustering Average (11 datasets)":26.57,"PairClassification Average (3 datasets)":72.94,"Reranking Average (4 datasets)":44.75,"Retrieval Average (15 datasets)":21.22,"STS Average (10 datasets)":62.46,"Summarization Average (1 datasets)":30.49} -{"index":141,"Rank":152,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":42.02,"Classification Average (12 datasets)":63.42,"Clustering Average (11 datasets)":26.2,"PairClassification Average (3 datasets)":71.4,"Reranking Average (4 datasets)":46.18,"Retrieval Average (15 datasets)":13.47,"STS Average (10 datasets)":67.23,"Summarization Average (1 datasets)":30.56} -{"index":232,"Rank":153,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (56 datasets)":41.96,"Classification Average (12 datasets)":57.29,"Clustering Average (11 datasets)":27.73,"PairClassification Average (3 datasets)":70.92,"Reranking Average (4 datasets)":43.29,"Retrieval Average (15 datasets)":21.62,"STS Average (10 datasets)":61.85,"Summarization Average (1 datasets)":28.87} -{"index":231,"Rank":154,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":40.28,"Classification Average (12 datasets)":52.37,"Clustering Average (11 datasets)":34.06,"PairClassification Average (3 datasets)":61.37,"Reranking Average (4 datasets)":48.1,"Retrieval Average (15 datasets)":15.88,"STS Average (10 datasets)":61.02,"Summarization Average (1 datasets)":27.66} -{"index":122,"Rank":155,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":38.33,"Classification Average (12 datasets)":61.66,"Clustering Average (11 datasets)":30.12,"PairClassification Average (3 datasets)":56.33,"Reranking Average (4 datasets)":43.44,"Retrieval Average (15 datasets)":10.59,"STS Average (10 datasets)":54.36,"Summarization Average (1 datasets)":29.82} -{"index":11,"Rank":156,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (56 datasets)":34.95,"Classification Average (12 datasets)":53.18,"Clustering Average (11 datasets)":15.28,"PairClassification Average (3 datasets)":68.86,"Reranking Average (4 datasets)":41.44,"Retrieval Average (15 datasets)":7.94,"STS Average (10 datasets)":63.27,"Summarization Average (1 datasets)":26.8} -{"index":2,"Rank":157,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":4,"Rank":159,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":1024,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 
datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":5,"Rank":160,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":16000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":7,"Rank":161,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":10,"Rank":162,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":12,"Rank":163,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":39.84,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":25,"Rank":166,"Model":"e5-R-mistral-7b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":58.65,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":30,"Rank":167,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":31.67,"PairClassification Average (3 datasets)":76.96,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":39,"Rank":172,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":40,"Rank":173,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million 
Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":41,"Rank":174,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":59,"Rank":186,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":64.85,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":74.53,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":75.11,"Summarization Average (1 datasets)":""} -{"index":73,"Rank":187,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":62.81,"Clustering Average (11 datasets)":35.96,"PairClassification Average (3 datasets)":80.73,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":74,"Rank":188,"Model":"gte-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":67.47,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":75,"Rank":189,"Model":"gte-micro-v3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":67.47,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":76,"Rank":190,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":68.13,"Clustering Average (11 datasets)":39.54,"PairClassification Average (3 datasets)":82.59,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":77,"Rank":191,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":63.54,"Clustering Average (11 datasets)":35.78,"PairClassification Average (3 datasets)":80.4,"Reranking Average (4 
datasets)":51.68,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":76.56,"Summarization Average (1 datasets)":30.52} -{"index":78,"Rank":192,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":64.56,"Clustering Average (11 datasets)":33.15,"PairClassification Average (3 datasets)":79.7,"Reranking Average (4 datasets)":50.55,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":75.37,"Summarization Average (1 datasets)":29.33} -{"index":93,"Rank":201,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.24,"Clustering Average (11 datasets)":44.49,"PairClassification Average (3 datasets)":86.03,"Reranking Average (4 datasets)":56.61,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.05,"Summarization Average (1 datasets)":30.19} -{"index":97,"Rank":203,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":30.62} -{"index":98,"Rank":204,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":79.84,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":32.47} -{"index":109,"Rank":206,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":2048,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":111,"Rank":208,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.58,"Clustering Average (11 datasets)":46.73,"PairClassification Average (3 datasets)":87.25,"Reranking Average (4 datasets)":59.88,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":84.54,"Summarization Average (1 datasets)":32.03} -{"index":114,"Rank":209,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.97,"Clustering Average (11 datasets)":46.08,"PairClassification Average (3 datasets)":87.12,"Reranking Average (4 datasets)":60.03,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":83.11,"Summarization Average (1 datasets)":31.61} -{"index":120,"Rank":210,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.53,"Clustering Average (11 datasets)":45.77,"PairClassification Average (3 datasets)":86.55,"Reranking Average (4 datasets)":58.86,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.07} -{"index":124,"Rank":211,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (56 datasets)":"","Classification Average (12 datasets)":73.13,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":84.48,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":127,"Rank":212,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":47.85,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":70.81,"Summarization Average (1 datasets)":29.02} -{"index":128,"Rank":213,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":61.44,"Clustering Average (11 datasets)":37.92,"PairClassification Average (3 datasets)":82.11,"Reranking Average (4 datasets)":55.24,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":130,"Rank":214,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":131,"Rank":215,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":132,"Rank":216,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":66.95,"Clustering Average (11 datasets)":40.83,"PairClassification Average (3 datasets)":81.03,"Reranking Average (4 datasets)":54.63,"Retrieval Average (15 datasets)":38.79,"STS Average (10 datasets)":80.17,"Summarization Average (1 datasets)":""} -{"index":133,"Rank":217,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.64,"Clustering Average (11 
datasets)":46.71,"PairClassification Average (3 datasets)":87.2,"Reranking Average (4 datasets)":60.11,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":85.0,"Summarization Average (1 datasets)":32.71} -{"index":135,"Rank":218,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":73.45,"Clustering Average (11 datasets)":41.73,"PairClassification Average (3 datasets)":85.38,"Reranking Average (4 datasets)":56.98,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":80.7,"Summarization Average (1 datasets)":31.6} -{"index":136,"Rank":219,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":68.82,"Clustering Average (11 datasets)":40.08,"PairClassification Average (3 datasets)":84.44,"Reranking Average (4 datasets)":55.09,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":80.0,"Summarization Average (1 datasets)":30.56} -{"index":137,"Rank":220,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.28,"Clustering Average (11 datasets)":44.9,"PairClassification Average (3 datasets)":86.45,"Reranking Average (4 datasets)":58.78,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":83.02,"Summarization Average (1 datasets)":32.52} -{"index":142,"Rank":221,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":143,"Rank":222,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":144,"Rank":223,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":145,"Rank":224,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 
datasets)":"","Summarization Average (1 datasets)":""} -{"index":146,"Rank":225,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":150,"Rank":226,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.97,"Clustering Average (11 datasets)":46.08,"PairClassification Average (3 datasets)":87.12,"Reranking Average (4 datasets)":60.03,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":83.11,"Summarization Average (1 datasets)":31.61} -{"index":163,"Rank":227,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":164,"Rank":228,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":165,"Rank":229,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":75.58,"Clustering Average (11 datasets)":46.73,"PairClassification Average (3 datasets)":87.25,"Reranking Average (4 datasets)":59.88,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":84.54,"Summarization Average (1 datasets)":32.03} -{"index":174,"Rank":230,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":67.98,"Clustering Average (11 datasets)":38.99,"PairClassification Average (3 datasets)":83.65,"Reranking Average (4 datasets)":54.64,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.61,"Summarization Average (1 datasets)":31.55} -{"index":176,"Rank":231,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":69.02,"Clustering Average (11 datasets)":39.29,"PairClassification Average (3 datasets)":84.56,"Reranking Average (4 datasets)":55.32,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":83.49,"Summarization Average (1 datasets)":29.87} -{"index":192,"Rank":237,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":63.67,"Clustering Average (11 datasets)":39.94,"PairClassification Average (3 datasets)":82.37,"Reranking Average (4 datasets)":58.04,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":198,"Rank":240,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":74.98,"Clustering Average (11 datasets)":45.06,"PairClassification Average (3 datasets)":86.24,"Reranking Average (4 datasets)":58.94,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.79,"Summarization Average (1 datasets)":""} -{"index":199,"Rank":241,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":72.62,"Clustering Average (11 datasets)":42.88,"PairClassification Average (3 datasets)":84.69,"Reranking Average (4 datasets)":57.18,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":81.19,"Summarization Average (1 datasets)":""} -{"index":200,"Rank":242,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":86.74,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":84.18,"Summarization Average (1 datasets)":""} -{"index":201,"Rank":243,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":85.06,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.5,"Summarization Average (1 datasets)":""} -{"index":202,"Rank":244,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":73.52,"Clustering Average (11 datasets)":42.63,"PairClassification Average (3 datasets)":85.05,"Reranking Average (4 datasets)":57.86,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.04,"Summarization Average (1 datasets)":""} -{"index":203,"Rank":245,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":68.55,"Clustering Average (11 datasets)":38.37,"PairClassification Average (3 datasets)":81.93,"Reranking Average (4 datasets)":54.14,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":78.53,"Summarization Average (1 datasets)":""} -{"index":216,"Rank":246,"Model":"NV-Retriever-v1<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification 
Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":60.9,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":225,"Rank":252,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":226,"Rank":253,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":234,"Rank":254,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":64.67,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":77.61,"Reranking Average (4 datasets)":48.82,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":75.6,"Summarization Average (1 datasets)":""} -{"index":240,"Rank":255,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":241,"Rank":256,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":37.14,"PairClassification Average (3 datasets)":78.45,"Reranking Average (4 datasets)":53.62,"Retrieval Average (15 datasets)":32.45,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":30.67} -{"index":242,"Rank":257,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":38.4,"PairClassification Average (3 datasets)":80.81,"Reranking Average (4 datasets)":53.8,"Retrieval Average (15 datasets)":35.34,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":31.57} -{"index":248,"Rank":259,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":62.23,"Clustering Average (11 datasets)":30.61,"PairClassification Average (3 datasets)":76.49,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 
datasets)":80.24,"Summarization Average (1 datasets)":30.1} -{"index":250,"Rank":261,"Model":"ret-phi2-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":50.78,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":251,"Rank":262,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":66.01,"Clustering Average (11 datasets)":33.47,"PairClassification Average (3 datasets)":77.75,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":255,"Rank":263,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":41.73,"Clustering Average (11 datasets)":18.0,"PairClassification Average (3 datasets)":30.61,"Reranking Average (4 datasets)":34.44,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":39.1,"Summarization Average (1 datasets)":""} -{"index":256,"Rank":264,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":61.37,"Clustering Average (11 datasets)":29.04,"PairClassification Average (3 datasets)":71.88,"Reranking Average (4 datasets)":47.31,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":65.8,"Summarization Average (1 datasets)":29.51} -{"index":257,"Rank":265,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":64.45,"Clustering Average (11 datasets)":35.71,"PairClassification Average (3 datasets)":76.23,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":72.04,"Summarization Average (1 datasets)":29.42} -{"index":258,"Rank":266,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":67.9,"Clustering Average (11 datasets)":37.82,"PairClassification Average (3 datasets)":79.53,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":74.05,"Summarization Average (1 datasets)":29.01} -{"index":259,"Rank":267,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":63.42,"Clustering Average (11 datasets)":34.82,"PairClassification Average (3 datasets)":75.43,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":75.39,"Summarization Average (1 datasets)":30.79} -{"index":260,"Rank":268,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":65.78,"Clustering Average (11 datasets)":35.06,"PairClassification Average (3 datasets)":79.62,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":75.35,"Summarization Average (1 datasets)":29.71} -{"index":262,"Rank":269,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":43.43,"PairClassification Average (3 datasets)":82.83,"Reranking Average (4 datasets)":54.29,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":30.57} -{"index":263,"Rank":270,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":0.41,"PairClassification Average (3 datasets)":0.79,"Reranking Average (4 datasets)":0.53,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":0.29} -{"index":264,"Rank":271,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":265,"Rank":272,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":267,"Rank":274,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":40.54,"PairClassification Average (3 datasets)":85.41,"Reranking Average (4 datasets)":54.46,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":80.59,"Summarization Average (1 datasets)":29.78} -{"index":268,"Rank":275,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":84.69,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":82.88,"Summarization Average (1 datasets)":""} -{"index":269,"Rank":276,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":83.58,"Reranking Average (4 datasets)":"","Retrieval 
Average (15 datasets)":"","STS Average (10 datasets)":81.9,"Summarization Average (1 datasets)":""} -{"index":270,"Rank":277,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":71.7,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":83.09,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":81.5,"Summarization Average (1 datasets)":""} -{"index":271,"Rank":278,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":79.66,"Summarization Average (1 datasets)":""} -{"index":272,"Rank":279,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":273,"Rank":280,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":274,"Rank":281,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":2048,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":275,"Rank":282,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":4096,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":276,"Rank":283,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":12288,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":2048,"Max 
Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":77.46,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":4096,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":77.79,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} -{"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":12288,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":75.9,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":1,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (56 datasets)":66.28,"Classification Average (12 datasets)":82.44,"Clustering Average (11 datasets)":53.35,"PairClassification Average (3 datasets)":89.24,"Reranking Average (4 datasets)":60.09,"Retrieval Average (15 datasets)":58.28,"STS Average (10 datasets)":85.84,"Summarization Average (1 datasets)":30.84} +{"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":65.31,"Classification Average (12 datasets)":78.87,"Clustering Average (11 datasets)":55.83,"PairClassification Average (3 datasets)":87.38,"Reranking Average (4 datasets)":60.13,"Retrieval Average (15 datasets)":56.24,"STS Average (10 datasets)":83.75,"Summarization Average (1 datasets)":31.46} +{"Rank":3,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Embedding Dimensions":1024,"Max Tokens":4000,"Average (56 datasets)":65.0,"Classification Average (12 datasets)":79.28,"Clustering Average (11 datasets)":52.42,"PairClassification Average (3 datasets)":86.87,"Reranking Average (4 datasets)":58.24,"Retrieval Average (15 datasets)":56.6,"STS Average (10 datasets)":86.75,"Summarization Average (1 datasets)":31.01} +{"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":768,"Max Tokens":2048,"Average (56 datasets)":64.12,"Classification Average (12 datasets)":83.05,"Clustering Average (11 datasets)":47.48,"PairClassification Average (3 datasets)":87.61,"Reranking Average (4 datasets)":58.9,"Retrieval Average (15 datasets)":55.7,"STS Average (10 datasets)":86.32,"Summarization Average (1 datasets)":32.63} +{"Rank":5,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":4000,"Average (56 datasets)":62.87,"Classification Average (12 datasets)":76.12,"Clustering Average (11 datasets)":47.4,"PairClassification Average (3 
datasets)":86.57,"Reranking Average (4 datasets)":59.74,"Retrieval Average (15 datasets)":55.58,"STS Average (10 datasets)":84.56,"Summarization Average (1 datasets)":30.97} +{"Rank":6,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (56 datasets)":62.77,"Classification Average (12 datasets)":73.46,"Clustering Average (11 datasets)":46.45,"PairClassification Average (3 datasets)":87.79,"Reranking Average (4 datasets)":59.68,"Retrieval Average (15 datasets)":56.63,"STS Average (10 datasets)":84.82,"Summarization Average (1 datasets)":30.94} +{"Rank":7,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":3072,"Max Tokens":8191,"Average (56 datasets)":62.67,"Classification Average (12 datasets)":75.49,"Clustering Average (11 datasets)":49.01,"PairClassification Average (3 datasets)":85.72,"Reranking Average (4 datasets)":59.16,"Retrieval Average (15 datasets)":55.44,"STS Average (10 datasets)":82.61,"Summarization Average (1 datasets)":29.92} +{"Rank":8,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":62.54,"Classification Average (12 datasets)":75.05,"Clustering Average (11 datasets)":45.54,"PairClassification Average (3 datasets)":87.99,"Reranking Average (4 datasets)":58.42,"Retrieval Average (15 datasets)":55.99,"STS Average (10 datasets)":85.38,"Summarization Average (1 datasets)":29.96} +{"Rank":9,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":256,"Max Tokens":2048,"Average (56 datasets)":62.3,"Classification Average (12 datasets)":81.98,"Clustering Average (11 datasets)":45.07,"PairClassification Average (3 datasets)":87.25,"Reranking Average (4 datasets)":57.78,"Retrieval Average (15 datasets)":52.44,"STS Average (10 datasets)":86.36,"Summarization Average (1 datasets)":32.36} +{"Rank":10,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (56 datasets)":61.78,"Classification Average (12 datasets)":74.58,"Clustering Average (11 datasets)":45.24,"PairClassification Average (3 datasets)":88.03,"Reranking Average (4 datasets)":57.38,"Retrieval Average (15 datasets)":54.6,"STS Average (10 datasets)":84.89,"Summarization Average (1 datasets)":28.49} +{"Rank":11,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (56 datasets)":60.3,"Classification Average (12 datasets)":73.63,"Clustering Average (11 datasets)":46.65,"PairClassification Average (3 datasets)":85.04,"Reranking Average (4 datasets)":56.72,"Retrieval Average (15 datasets)":51.08,"STS Average (10 datasets)":82.49,"Summarization Average (1 datasets)":31.12} +{"Rank":12,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":256,"Max Tokens":8191,"Average (56 datasets)":60.17,"Classification Average (12 datasets)":72.29,"Clustering Average (11 datasets)":46.23,"PairClassification Average (3 datasets)":84.22,"Reranking Average (4 datasets)":57.99,"Retrieval Average (15 datasets)":51.66,"STS Average (10 
datasets)":82.05,"Summarization Average (1 datasets)":29.92} +{"Rank":13,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":512,"Max Tokens":8192,"Average (56 datasets)":59.98,"Classification Average (12 datasets)":73.48,"Clustering Average (11 datasets)":43.71,"PairClassification Average (3 datasets)":84.59,"Reranking Average (4 datasets)":55.65,"Retrieval Average (15 datasets)":52.4,"STS Average (10 datasets)":83.07,"Summarization Average (1 datasets)":30.47} +{"Rank":14,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (56 datasets)":59.6,"Classification Average (12 datasets)":70.41,"Clustering Average (11 datasets)":43.57,"PairClassification Average (3 datasets)":86.21,"Reranking Average (4 datasets)":55.38,"Retrieval Average (15 datasets)":51.44,"STS Average (10 datasets)":84.68,"Summarization Average (1 datasets)":30.01} +{"Rank":15,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":256,"Max Tokens":8192,"Average (56 datasets)":59.1,"Classification Average (12 datasets)":72.5,"Clustering Average (11 datasets)":43.16,"PairClassification Average (3 datasets)":84.09,"Reranking Average (4 datasets)":55.18,"Retrieval Average (15 datasets)":50.81,"STS Average (10 datasets)":82.72,"Summarization Average (1 datasets)":30.05} +{"Rank":16,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (56 datasets)":59.04,"Classification Average (12 datasets)":71.37,"Clustering Average (11 datasets)":45.9,"PairClassification Average (3 datasets)":84.89,"Reranking Average (4 datasets)":56.32,"Retrieval Average (15 datasets)":49.25,"STS Average (10 datasets)":81.42,"Summarization Average (1 datasets)":30.8} +{"Rank":17,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":128,"Max Tokens":8192,"Average (56 datasets)":57.45,"Classification Average (12 datasets)":70.24,"Clustering Average (11 datasets)":42.24,"PairClassification Average (3 datasets)":83.54,"Reranking Average (4 datasets)":54.56,"Retrieval Average (15 datasets)":47.75,"STS Average (10 datasets)":82.11,"Summarization Average (1 datasets)":29.59} +{"Rank":18,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":57.14,"Classification Average (12 datasets)":74.32,"Clustering Average (11 datasets)":43.72,"PairClassification Average (3 datasets)":85.06,"Reranking Average (4 datasets)":56.42,"Retrieval Average (15 datasets)":42.24,"STS Average (10 datasets)":83.94,"Summarization Average (1 datasets)":30.08} +{"Rank":19,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":57.02,"Classification Average (12 datasets)":65.73,"Clustering Average (11 datasets)":42.42,"PairClassification Average (3 datasets)":86.12,"Reranking Average (4 datasets)":56.66,"Retrieval Average (15 datasets)":48.48,"STS Average (10 datasets)":79.11,"Summarization Average (1 datasets)":30.64} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":56.41,"Classification Average (12 datasets)":65.47,"Clustering Average (11 datasets)":41.51,"PairClassification Average (3 datasets)":86.13,"Reranking Average (4 datasets)":55.96,"Retrieval Average (15 datasets)":47.96,"STS Average (10 datasets)":78.31,"Summarization Average (1 datasets)":30.21} +{"Rank":21,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":56.34,"Classification Average (12 datasets)":65.84,"Clustering Average (11 datasets)":41.6,"PairClassification Average (3 datasets)":85.32,"Reranking Average (4 datasets)":55.36,"Retrieval Average (15 datasets)":47.42,"STS Average (10 datasets)":79.21,"Summarization Average (1 datasets)":29.5} +{"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":55.72,"Classification Average (12 datasets)":63.06,"Clustering Average (11 datasets)":43.69,"PairClassification Average (3 datasets)":83.04,"Reranking Average (4 datasets)":59.36,"Retrieval Average (15 datasets)":43.81,"STS Average (10 datasets)":80.53,"Summarization Average (1 datasets)":27.49} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":55.39,"Classification Average (12 datasets)":74.28,"Clustering Average (11 datasets)":42.34,"PairClassification Average (3 datasets)":86.06,"Reranking Average (4 datasets)":54.71,"Retrieval Average (15 datasets)":38.47,"STS Average (10 datasets)":82.92,"Summarization Average (1 datasets)":29.91} +{"Rank":24,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":54.59,"Classification Average (12 datasets)":74.02,"Clustering Average (11 datasets)":41.65,"PairClassification Average (3 datasets)":84.97,"Reranking Average (4 datasets)":54.0,"Retrieval Average (15 datasets)":36.71,"STS Average (10 datasets)":83.36,"Summarization Average (1 datasets)":29.64} +{"Rank":25,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":54.56,"Classification Average (12 datasets)":61.0,"Clustering Average (11 datasets)":41.81,"PairClassification Average (3 datasets)":82.41,"Reranking Average (4 datasets)":58.44,"Retrieval Average (15 datasets)":42.69,"STS Average (10 datasets)":80.47,"Summarization Average (1 datasets)":27.9} +{"Rank":26,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":64,"Max Tokens":8192,"Average (56 datasets)":54.16,"Classification Average (12 datasets)":67.58,"Clustering Average (11 datasets)":40.31,"PairClassification Average (3 datasets)":82.26,"Reranking Average (4 datasets)":53.42,"Retrieval Average (15 datasets)":40.92,"STS Average (10 datasets)":81.04,"Summarization Average (1 datasets)":28.41} +{"Rank":27,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":54.12,"Classification Average (12 datasets)":60.5,"Clustering Average (11 datasets)":41.94,"PairClassification Average (3 
datasets)":82.37,"Reranking Average (4 datasets)":58.04,"Retrieval Average (15 datasets)":41.95,"STS Average (10 datasets)":79.27,"Summarization Average (1 datasets)":30.81} +{"Rank":28,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":53.95,"Classification Average (12 datasets)":62.98,"Clustering Average (11 datasets)":38.63,"PairClassification Average (3 datasets)":83.85,"Reranking Average (4 datasets)":54.23,"Retrieval Average (15 datasets)":44.67,"STS Average (10 datasets)":77.35,"Summarization Average (1 datasets)":29.67} +{"Rank":29,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":30522,"Max Tokens":512,"Average (56 datasets)":53.9,"Classification Average (12 datasets)":62.89,"Clustering Average (11 datasets)":32.28,"PairClassification Average (3 datasets)":81.32,"Reranking Average (4 datasets)":53.27,"Retrieval Average (15 datasets)":50.02,"STS Average (10 datasets)":77.03,"Summarization Average (1 datasets)":31.03} +{"Rank":30,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":53.54,"Classification Average (12 datasets)":71.65,"Clustering Average (11 datasets)":40.63,"PairClassification Average (3 datasets)":80.94,"Reranking Average (4 datasets)":53.98,"Retrieval Average (15 datasets)":38.05,"STS Average (10 datasets)":79.19,"Summarization Average (1 datasets)":30.19} +{"Rank":31,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":53.53,"Classification Average (12 datasets)":64.05,"Clustering Average (11 datasets)":41.1,"PairClassification Average (3 datasets)":82.54,"Reranking Average (4 datasets)":53.14,"Retrieval Average (15 datasets)":41.88,"STS Average (10 datasets)":76.77,"Summarization Average (1 datasets)":30.36} +{"Rank":32,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (56 datasets)":53.21,"Classification Average (12 datasets)":69.53,"Clustering Average (11 datasets)":41.99,"PairClassification Average (3 datasets)":78.01,"Reranking Average (4 datasets)":53.09,"Retrieval Average (15 datasets)":39.19,"STS Average (10 datasets)":76.16,"Summarization Average (1 datasets)":31.45} +{"Rank":33,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":52.63,"Classification Average (12 datasets)":70.2,"Clustering Average (11 datasets)":40.21,"PairClassification Average (3 datasets)":85.18,"Reranking Average (4 datasets)":53.09,"Retrieval Average (15 datasets)":33.63,"STS Average (10 datasets)":82.4,"Summarization Average (1 datasets)":31.39} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (56 datasets)":52.21,"Classification Average (12 datasets)":69.16,"Clustering Average (11 datasets)":40.83,"PairClassification Average (3 datasets)":77.88,"Reranking Average (4 datasets)":52.95,"Retrieval Average (15 datasets)":36.75,"STS Average (10 datasets)":76.73,"Summarization Average (1 
datasets)":31.38} +{"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":51.93,"Classification Average (12 datasets)":65.42,"Clustering Average (11 datasets)":38.5,"PairClassification Average (3 datasets)":80.81,"Reranking Average (4 datasets)":53.8,"Retrieval Average (15 datasets)":35.34,"STS Average (10 datasets)":82.15,"Summarization Average (1 datasets)":31.57} +{"Rank":36,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":49.8,"Classification Average (12 datasets)":62.33,"Clustering Average (11 datasets)":37.14,"PairClassification Average (3 datasets)":78.45,"Reranking Average (4 datasets)":53.62,"Retrieval Average (15 datasets)":32.45,"STS Average (10 datasets)":80.08,"Summarization Average (1 datasets)":30.67} +{"Rank":37,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":49.43,"Classification Average (12 datasets)":61.84,"Clustering Average (11 datasets)":37.64,"PairClassification Average (3 datasets)":81.74,"Reranking Average (4 datasets)":51.84,"Retrieval Average (15 datasets)":32.96,"STS Average (10 datasets)":76.41,"Summarization Average (1 datasets)":29.5} +{"Rank":38,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (56 datasets)":46.04,"Classification Average (12 datasets)":72.32,"Clustering Average (11 datasets)":37.52,"PairClassification Average (3 datasets)":76.86,"Reranking Average (4 datasets)":49.02,"Retrieval Average (15 datasets)":18.36,"STS Average (10 datasets)":79.27,"Summarization Average (1 datasets)":26.94} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (56 datasets)":45.89,"Classification Average (12 datasets)":65.42,"Clustering Average (11 datasets)":37.45,"PairClassification Average (3 datasets)":72.21,"Reranking Average (4 datasets)":47.7,"Retrieval Average (15 datasets)":25.93,"STS Average (10 datasets)":71.34,"Summarization Average (1 datasets)":31.23} +{"Rank":40,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":45.53,"Classification Average (12 datasets)":68.06,"Clustering Average (11 datasets)":33.43,"PairClassification Average (3 datasets)":73.68,"Reranking Average (4 datasets)":47.54,"Retrieval Average (15 datasets)":21.82,"STS Average (10 datasets)":79.97,"Summarization Average (1 datasets)":31.17} +{"Rank":41,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":42.37,"Classification Average (12 datasets)":63.68,"Clustering Average (11 datasets)":29.04,"PairClassification Average (3 datasets)":70.33,"Reranking Average (4 datasets)":46.47,"Retrieval Average (15 datasets)":20.29,"STS Average (10 datasets)":75.01,"Summarization Average (1 datasets)":31.15} +{"Rank":42,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":41.72,"Classification Average (12 datasets)":60.63,"Clustering Average (11 datasets)":29.55,"PairClassification Average (3 datasets)":78.87,"Reranking Average (4 datasets)":48.42,"Retrieval Average (15 datasets)":18.99,"STS Average (10 datasets)":70.95,"Summarization Average (1 datasets)":31.05} +{"Rank":43,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (56 datasets)":39.04,"Classification Average (12 datasets)":56.87,"Clustering Average (11 datasets)":26.57,"PairClassification Average (3 datasets)":72.94,"Reranking Average (4 datasets)":44.75,"Retrieval Average (15 datasets)":21.22,"STS Average (10 datasets)":61.73,"Summarization Average (1 datasets)":30.49} +{"Rank":44,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (56 datasets)":39.0,"Classification Average (12 datasets)":57.28,"Clustering Average (11 datasets)":27.73,"PairClassification Average (3 datasets)":70.92,"Reranking Average (4 datasets)":43.29,"Retrieval Average (15 datasets)":21.62,"STS Average (10 datasets)":60.52,"Summarization Average (1 datasets)":28.87} +{"Rank":45,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":37.73,"Classification Average (12 datasets)":51.43,"Clustering Average (11 datasets)":34.06,"PairClassification Average (3 datasets)":61.37,"Reranking Average (4 datasets)":48.1,"Retrieval Average (15 datasets)":15.88,"STS Average (10 datasets)":60.68,"Summarization Average (1 datasets)":27.66} +{"Rank":46,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":34.23,"Classification Average (12 datasets)":59.53,"Clustering Average (11 datasets)":30.12,"PairClassification Average (3 datasets)":56.33,"Reranking Average (4 datasets)":43.44,"Retrieval Average (15 datasets)":10.59,"STS Average (10 datasets)":52.89,"Summarization Average (1 datasets)":29.82} +{"Rank":47,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (56 datasets)":31.38,"Classification Average (12 datasets)":50.66,"Clustering Average (11 datasets)":15.28,"PairClassification Average (3 datasets)":68.86,"Reranking Average (4 datasets)":41.44,"Retrieval Average (15 datasets)":7.94,"STS Average (10 datasets)":64.53,"Summarization Average (1 datasets)":26.8} +{"Rank":48,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":50,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":1024,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval 
Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":51,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":16000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":52,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":53,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":54,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":39.84,"STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":55,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":56,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":57,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":58,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 
datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":59,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":60,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":61,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":62,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":63,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":64,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":65,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (56 datasets)":"","Classification Average (12 datasets)":69.69,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":82.95,"Reranking Average (4 datasets)":57.09,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":79.59,"Summarization Average (1 datasets)":30.26} +{"Rank":66,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage 
(GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":2048,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":67,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":68,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":69,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":70,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":71,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":72,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":73,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} 
+{"Rank":74,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":75,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":76,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":77,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":78,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":79,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":80,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":81,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 
datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":82,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":83,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":84,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":85,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":86,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":70.4,"Clustering Average (11 datasets)":39.62,"PairClassification Average (3 datasets)":83.57,"Reranking Average (4 datasets)":55.03,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":81.62,"Summarization Average (1 datasets)":30.23} +{"Rank":87,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":72.63,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":84.75,"Reranking Average (4 datasets)":55.96,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":83.05,"Summarization Average (1 datasets)":29.65} +{"Rank":88,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":67.98,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":82.65,"Reranking Average (4 datasets)":52.9,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":80.48,"Summarization Average (1 datasets)":30.04} +{"Rank":89,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 
datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":90,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":91,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":92,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":93,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":94,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":95,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":61.68,"Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":77.61,"Reranking Average (4 datasets)":48.82,"Retrieval Average (15 datasets)":"","STS Average (10 datasets)":75.86,"Summarization Average (1 datasets)":""} +{"Rank":96,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} 
+{"Rank":97,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":98,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (56 datasets)":"","Classification Average (12 datasets)":54.86,"Clustering Average (11 datasets)":23.75,"PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 
datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":2048,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":4096,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":12288,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":2048,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":77.46,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":4096,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":77.79,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":12288,"Max Tokens":2046,"Average (56 datasets)":"","Classification Average (12 datasets)":"","Clustering Average (11 datasets)":"","PairClassification Average (3 datasets)":75.9,"Reranking Average (4 datasets)":"","Retrieval Average (15 datasets)":"","STS Average (10 datasets)":"","Summarization Average (1 datasets)":""} diff --git a/boards_data/en/data_tasks/Classification/default.jsonl b/boards_data/en/data_tasks/Classification/default.jsonl index d4cf179038329d9ef807eae306bb8a0458dc5e94..2a158b5b2761bd0aaa7ec8348604f4ffc3004b5f 100644 --- a/boards_data/en/data_tasks/Classification/default.jsonl +++ b/boards_data/en/data_tasks/Classification/default.jsonl @@ -1,209 +1,111 @@ -{"level_0":0,"index":95,"Rank":1,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.05,"AmazonCounterfactualClassification 
(en)":92.72,"AmazonPolarityClassification":97.31,"AmazonReviewsClassification (en)":61.04,"Banking77Classification":90.02,"EmotionClassification":93.37,"ImdbClassification":96.8,"MassiveIntentClassification (en)":85.97,"MassiveScenarioClassification (en)":90.61,"MTOPDomainClassification (en)":98.58,"MTOPIntentClassification (en)":91.3,"ToxicConversationsClassification":91.14,"TweetSentimentExtractionClassification":79.7} -{"level_0":1,"index":21,"Rank":2,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.95,"AmazonCounterfactualClassification (en)":93.15,"AmazonPolarityClassification":96.98,"AmazonReviewsClassification (en)":61.46,"Banking77Classification":91.49,"EmotionClassification":93.36,"ImdbClassification":96.91,"MassiveIntentClassification (en)":82.93,"MassiveScenarioClassification (en)":85.6,"MTOPDomainClassification (en)":98.42,"MTOPIntentClassification (en)":94.0,"ToxicConversationsClassification":93.17,"TweetSentimentExtractionClassification":79.93} -{"level_0":2,"index":23,"Rank":3,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.08,"AmazonCounterfactualClassification (en)":89.48,"AmazonPolarityClassification":96.9,"AmazonReviewsClassification (en)":61.6,"Banking77Classification":92.53,"EmotionClassification":92.97,"ImdbClassification":96.66,"MassiveIntentClassification (en)":82.05,"MassiveScenarioClassification (en)":84.4,"MTOPDomainClassification (en)":98.61,"MTOPIntentClassification (en)":95.51,"ToxicConversationsClassification":87.34,"TweetSentimentExtractionClassification":78.86} -{"level_0":3,"index":51,"Rank":4,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.91,"AmazonCounterfactualClassification (en)":93.1,"AmazonPolarityClassification":97.54,"AmazonReviewsClassification (en)":61.17,"Banking77Classification":88.73,"EmotionClassification":91.36,"ImdbClassification":96.92,"MassiveIntentClassification (en)":82.5,"MassiveScenarioClassification (en)":84.5,"MTOPDomainClassification (en)":99.03,"MTOPIntentClassification (en)":90.94,"ToxicConversationsClassification":91.17,"TweetSentimentExtractionClassification":77.93} -{"level_0":4,"index":138,"Rank":5,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.63,"AmazonCounterfactualClassification (en)":92.87,"AmazonPolarityClassification":97.16,"AmazonReviewsClassification (en)":59.36,"Banking77Classification":89.79,"EmotionClassification":84.29,"ImdbClassification":96.66,"MassiveIntentClassification (en)":85.83,"MassiveScenarioClassification (en)":90.2,"MTOPDomainClassification (en)":99.01,"MTOPIntentClassification (en)":92.78,"ToxicConversationsClassification":88.76,"TweetSentimentExtractionClassification":74.84} -{"level_0":5,"index":215,"Rank":6,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":87.35,"AmazonCounterfactualClassification (en)":95.12,"AmazonPolarityClassification":97.14,"AmazonReviewsClassification (en)":55.47,"Banking77Classification":90.34,"EmotionClassification":91.7,"ImdbClassification":97.06,"MassiveIntentClassification (en)":80.07,"MassiveScenarioClassification (en)":81.74,"MTOPDomainClassification (en)":96.51,"MTOPIntentClassification (en)":89.77,"ToxicConversationsClassification":92.6,"TweetSentimentExtractionClassification":80.64} 
-{"level_0":6,"index":139,"Rank":7,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.67,"AmazonCounterfactualClassification (en)":92.36,"AmazonPolarityClassification":97.19,"AmazonReviewsClassification (en)":59.53,"Banking77Classification":89.3,"EmotionClassification":78.77,"ImdbClassification":96.49,"MassiveIntentClassification (en)":85.17,"MassiveScenarioClassification (en)":89.62,"MTOPDomainClassification (en)":98.83,"MTOPIntentClassification (en)":92.3,"ToxicConversationsClassification":86.94,"TweetSentimentExtractionClassification":73.58} -{"level_0":7,"index":205,"Rank":8,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.58,"AmazonCounterfactualClassification (en)":91.31,"AmazonPolarityClassification":97.5,"AmazonReviewsClassification (en)":62.56,"Banking77Classification":87.57,"EmotionClassification":79.45,"ImdbClassification":96.75,"MassiveIntentClassification (en)":85.41,"MassiveScenarioClassification (en)":89.77,"MTOPDomainClassification (en)":99.04,"MTOPIntentClassification (en)":91.88,"ToxicConversationsClassification":85.12,"TweetSentimentExtractionClassification":72.58} -{"level_0":8,"index":17,"Rank":9,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":86.58,"AmazonCounterfactualClassification (en)":91.31,"AmazonPolarityClassification":97.5,"AmazonReviewsClassification (en)":62.56,"Banking77Classification":87.57,"EmotionClassification":79.45,"ImdbClassification":96.75,"MassiveIntentClassification (en)":85.41,"MassiveScenarioClassification (en)":89.77,"MTOPDomainClassification (en)":99.04,"MTOPIntentClassification (en)":91.88,"ToxicConversationsClassification":85.12,"TweetSentimentExtractionClassification":72.58} -{"level_0":9,"index":126,"Rank":10,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.58,"AmazonCounterfactualClassification (en)":91.31,"AmazonPolarityClassification":97.5,"AmazonReviewsClassification (en)":62.56,"Banking77Classification":87.57,"EmotionClassification":79.45,"ImdbClassification":96.75,"MassiveIntentClassification (en)":85.41,"MassiveScenarioClassification (en)":89.77,"MTOPDomainClassification (en)":99.04,"MTOPIntentClassification (en)":91.88,"ToxicConversationsClassification":85.12,"TweetSentimentExtractionClassification":72.58} -{"level_0":10,"index":16,"Rank":11,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.47,"AmazonCounterfactualClassification (en)":83.99,"AmazonPolarityClassification":96.61,"AmazonReviewsClassification (en)":55.61,"Banking77Classification":87.31,"EmotionClassification":61.37,"ImdbClassification":95.83,"MassiveIntentClassification (en)":82.4,"MassiveScenarioClassification (en)":84.5,"MTOPDomainClassification (en)":97.69,"MTOPIntentClassification (en)":88.76,"ToxicConversationsClassification":82.66,"TweetSentimentExtractionClassification":72.95} -{"level_0":11,"index":204,"Rank":12,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.47,"AmazonCounterfactualClassification (en)":83.99,"AmazonPolarityClassification":96.61,"AmazonReviewsClassification (en)":55.61,"Banking77Classification":87.31,"EmotionClassification":61.37,"ImdbClassification":95.83,"MassiveIntentClassification 
(en)":82.4,"MassiveScenarioClassification (en)":84.5,"MTOPDomainClassification (en)":97.69,"MTOPIntentClassification (en)":88.76,"ToxicConversationsClassification":82.66,"TweetSentimentExtractionClassification":72.95} -{"level_0":12,"index":6,"Rank":13,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.49,"AmazonCounterfactualClassification (en)":77.6,"AmazonPolarityClassification":96.58,"AmazonReviewsClassification (en)":50.77,"Banking77Classification":86.96,"EmotionClassification":59.81,"ImdbClassification":96.13,"MassiveIntentClassification (en)":81.08,"MassiveScenarioClassification (en)":87.95,"MTOPDomainClassification (en)":98.86,"MTOPIntentClassification (en)":86.97,"ToxicConversationsClassification":83.58,"TweetSentimentExtractionClassification":71.55} -{"level_0":13,"index":1,"Rank":14,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":81.17,"AmazonCounterfactualClassification (en)":75.34,"AmazonPolarityClassification":97.34,"AmazonReviewsClassification (en)":51.17,"Banking77Classification":88.62,"EmotionClassification":52.51,"ImdbClassification":95.65,"MassiveIntentClassification (en)":80.22,"MassiveScenarioClassification (en)":87.19,"MTOPDomainClassification (en)":98.35,"MTOPIntentClassification (en)":83.43,"ToxicConversationsClassification":89.67,"TweetSentimentExtractionClassification":74.52} -{"level_0":14,"index":58,"Rank":15,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.2,"AmazonCounterfactualClassification (en)":84.43,"AmazonPolarityClassification":95.7,"AmazonReviewsClassification (en)":57.64,"Banking77Classification":87.88,"EmotionClassification":51.82,"ImdbClassification":94.78,"MassiveIntentClassification (en)":82.67,"MassiveScenarioClassification (en)":85.01,"MTOPDomainClassification (en)":96.83,"MTOPIntentClassification (en)":89.57,"ToxicConversationsClassification":71.29,"TweetSentimentExtractionClassification":64.76} -{"level_0":15,"index":15,"Rank":16,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":79.6,"AmazonCounterfactualClassification (en)":83.16,"AmazonPolarityClassification":96.7,"AmazonReviewsClassification (en)":62.17,"Banking77Classification":81.68,"EmotionClassification":54.53,"ImdbClassification":95.58,"MassiveIntentClassification (en)":78.47,"MassiveScenarioClassification (en)":78.19,"MTOPDomainClassification (en)":95.75,"MTOPIntentClassification (en)":84.26,"ToxicConversationsClassification":78.75,"TweetSentimentExtractionClassification":66.0} -{"level_0":16,"index":9,"Rank":17,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":79.25,"AmazonCounterfactualClassification (en)":88.31,"AmazonPolarityClassification":96.32,"AmazonReviewsClassification (en)":56.25,"Banking77Classification":88.59,"EmotionClassification":50.28,"ImdbClassification":95.75,"MassiveIntentClassification (en)":73.97,"MassiveScenarioClassification (en)":83.99,"MTOPDomainClassification (en)":97.65,"MTOPIntentClassification (en)":75.16,"ToxicConversationsClassification":81.75,"TweetSentimentExtractionClassification":62.98} -{"level_0":17,"index":0,"Rank":18,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, 
fp32)":4.47,"Average":79.0,"AmazonCounterfactualClassification (en)":70.93,"AmazonPolarityClassification":97.34,"AmazonReviewsClassification (en)":48.47,"Banking77Classification":86.01,"EmotionClassification":51.53,"ImdbClassification":95.7,"MassiveIntentClassification (en)":75.67,"MassiveScenarioClassification (en)":85.16,"MTOPDomainClassification (en)":98.02,"MTOPIntentClassification (en)":77.82,"ToxicConversationsClassification":88.33,"TweetSentimentExtractionClassification":72.97} -{"level_0":18,"index":43,"Rank":19,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.53,"AmazonCounterfactualClassification (en)":80.48,"AmazonPolarityClassification":96.32,"AmazonReviewsClassification (en)":57.18,"Banking77Classification":87.46,"EmotionClassification":50.06,"ImdbClassification":94.32,"MassiveIntentClassification (en)":79.72,"MassiveScenarioClassification (en)":81.09,"MTOPDomainClassification (en)":95.29,"MTOPIntentClassification (en)":87.08,"ToxicConversationsClassification":70.89,"TweetSentimentExtractionClassification":62.48} -{"level_0":19,"index":156,"Rank":20,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":78.47,"AmazonCounterfactualClassification (en)":78.69,"AmazonPolarityClassification":95.91,"AmazonReviewsClassification (en)":55.79,"Banking77Classification":88.23,"EmotionClassification":49.77,"ImdbClassification":94.78,"MassiveIntentClassification (en)":80.57,"MassiveScenarioClassification (en)":82.39,"MTOPDomainClassification (en)":96.12,"MTOPIntentClassification (en)":86.11,"ToxicConversationsClassification":69.59,"TweetSentimentExtractionClassification":63.72} -{"level_0":20,"index":96,"Rank":21,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":78.33,"AmazonCounterfactualClassification (en)":77.93,"AmazonPolarityClassification":95.97,"AmazonReviewsClassification (en)":54.35,"Banking77Classification":88.81,"EmotionClassification":50.24,"ImdbClassification":94.79,"MassiveIntentClassification (en)":79.99,"MassiveScenarioClassification (en)":82.2,"MTOPDomainClassification (en)":96.36,"MTOPIntentClassification (en)":86.3,"ToxicConversationsClassification":69.33,"TweetSentimentExtractionClassification":63.64} -{"level_0":21,"index":19,"Rank":22,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.75,"AmazonCounterfactualClassification (en)":73.01,"AmazonPolarityClassification":93.97,"AmazonReviewsClassification (en)":54.2,"Banking77Classification":87.33,"EmotionClassification":46.77,"ImdbClassification":92.1,"MassiveIntentClassification (en)":78.94,"MassiveScenarioClassification (en)":81.41,"MTOPDomainClassification (en)":96.6,"MTOPIntentClassification (en)":82.9,"ToxicConversationsClassification":82.61,"TweetSentimentExtractionClassification":63.16} -{"level_0":22,"index":161,"Rank":23,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.56,"AmazonCounterfactualClassification (en)":76.24,"AmazonPolarityClassification":96.29,"AmazonReviewsClassification (en)":56.72,"Banking77Classification":85.73,"EmotionClassification":51.51,"ImdbClassification":94.6,"MassiveIntentClassification (en)":77.06,"MassiveScenarioClassification (en)":80.47,"MTOPDomainClassification (en)":93.93,"MTOPIntentClassification 
(en)":82.46,"ToxicConversationsClassification":71.06,"TweetSentimentExtractionClassification":64.62} -{"level_0":23,"index":178,"Rank":24,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.43,"AmazonCounterfactualClassification (en)":82.97,"AmazonPolarityClassification":90.98,"AmazonReviewsClassification (en)":48.71,"Banking77Classification":88.15,"EmotionClassification":52.18,"ImdbClassification":87.42,"MassiveIntentClassification (en)":79.67,"MassiveScenarioClassification (en)":82.82,"MTOPDomainClassification (en)":96.16,"MTOPIntentClassification (en)":85.75,"ToxicConversationsClassification":71.91,"TweetSentimentExtractionClassification":62.4} -{"level_0":24,"index":219,"Rank":25,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.17,"AmazonCounterfactualClassification (en)":79.66,"AmazonPolarityClassification":94.48,"AmazonReviewsClassification (en)":48.16,"Banking77Classification":88.91,"EmotionClassification":52.01,"ImdbClassification":89.47,"MassiveIntentClassification (en)":80.12,"MassiveScenarioClassification (en)":82.7,"MTOPDomainClassification (en)":96.46,"MTOPIntentClassification (en)":85.38,"ToxicConversationsClassification":66.18,"TweetSentimentExtractionClassification":62.54} -{"level_0":25,"index":18,"Rank":26,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.17,"AmazonCounterfactualClassification (en)":74.79,"AmazonPolarityClassification":93.02,"AmazonReviewsClassification (en)":53.31,"Banking77Classification":86.73,"EmotionClassification":46.39,"ImdbClassification":87.48,"MassiveIntentClassification (en)":77.67,"MassiveScenarioClassification (en)":81.77,"MTOPDomainClassification (en)":96.5,"MTOPIntentClassification (en)":82.81,"ToxicConversationsClassification":83.98,"TweetSentimentExtractionClassification":61.57} -{"level_0":26,"index":64,"Rank":27,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":76.63,"AmazonCounterfactualClassification (en)":77.58,"AmazonPolarityClassification":91.12,"AmazonReviewsClassification (en)":49.97,"Banking77Classification":88.31,"EmotionClassification":52.04,"ImdbClassification":87.42,"MassiveIntentClassification (en)":79.29,"MassiveScenarioClassification (en)":81.64,"MTOPDomainClassification (en)":96.04,"MTOPIntentClassification (en)":84.77,"ToxicConversationsClassification":69.26,"TweetSentimentExtractionClassification":62.14} -{"level_0":27,"index":34,"Rank":28,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.49,"AmazonCounterfactualClassification (en)":81.3,"AmazonPolarityClassification":95.62,"AmazonReviewsClassification (en)":51.72,"Banking77Classification":85.53,"EmotionClassification":51.57,"ImdbClassification":93.57,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":78.74,"MTOPDomainClassification (en)":94.88,"MTOPIntentClassification (en)":76.52,"ToxicConversationsClassification":72.61,"TweetSentimentExtractionClassification":62.02} -{"level_0":28,"index":60,"Rank":29,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":76.33,"AmazonCounterfactualClassification (en)":82.22,"AmazonPolarityClassification":89.69,"AmazonReviewsClassification 
(en)":48.47,"Banking77Classification":88.17,"EmotionClassification":51.71,"ImdbClassification":85.78,"MassiveIntentClassification (en)":78.06,"MassiveScenarioClassification (en)":81.35,"MTOPDomainClassification (en)":95.57,"MTOPIntentClassification (en)":82.81,"ToxicConversationsClassification":71.01,"TweetSentimentExtractionClassification":61.11} -{"level_0":29,"index":118,"Rank":30,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.11,"AmazonCounterfactualClassification (en)":75.27,"AmazonPolarityClassification":93.23,"AmazonReviewsClassification (en)":49.72,"Banking77Classification":86.65,"EmotionClassification":55.89,"ImdbClassification":89.49,"MassiveIntentClassification (en)":75.53,"MassiveScenarioClassification (en)":79.23,"MTOPDomainClassification (en)":95.48,"MTOPIntentClassification (en)":79.09,"ToxicConversationsClassification":72.8,"TweetSentimentExtractionClassification":61.0} -{"level_0":30,"index":115,"Rank":31,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.03,"AmazonCounterfactualClassification (en)":75.96,"AmazonPolarityClassification":93.51,"AmazonReviewsClassification (en)":50.45,"Banking77Classification":87.3,"EmotionClassification":54.68,"ImdbClassification":89.66,"MassiveIntentClassification (en)":76.01,"MassiveScenarioClassification (en)":79.64,"MTOPDomainClassification (en)":95.3,"MTOPIntentClassification (en)":78.1,"ToxicConversationsClassification":72.42,"TweetSentimentExtractionClassification":59.32} -{"level_0":31,"index":117,"Rank":32,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.01,"AmazonCounterfactualClassification (en)":75.58,"AmazonPolarityClassification":93.41,"AmazonReviewsClassification (en)":49.06,"Banking77Classification":88.1,"EmotionClassification":54.72,"ImdbClassification":91.23,"MassiveIntentClassification (en)":76.2,"MassiveScenarioClassification (en)":79.35,"MTOPDomainClassification (en)":95.25,"MTOPIntentClassification (en)":78.24,"ToxicConversationsClassification":71.86,"TweetSentimentExtractionClassification":59.17} -{"level_0":32,"index":36,"Rank":33,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.01,"AmazonCounterfactualClassification (en)":77.85,"AmazonPolarityClassification":95.6,"AmazonReviewsClassification (en)":49.79,"Banking77Classification":86.09,"EmotionClassification":48.15,"ImdbClassification":93.97,"MassiveIntentClassification (en)":74.51,"MassiveScenarioClassification (en)":79.0,"MTOPDomainClassification (en)":94.92,"MTOPIntentClassification (en)":78.89,"ToxicConversationsClassification":71.2,"TweetSentimentExtractionClassification":62.18} -{"level_0":33,"index":186,"Rank":34,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.99,"AmazonCounterfactualClassification (en)":76.06,"AmazonPolarityClassification":91.98,"AmazonReviewsClassification (en)":47.94,"Banking77Classification":87.9,"EmotionClassification":52.03,"ImdbClassification":92.79,"MassiveIntentClassification (en)":77.41,"MassiveScenarioClassification (en)":80.45,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.26,"ToxicConversationsClassification":71.42,"TweetSentimentExtractionClassification":60.0} -{"level_0":34,"index":22,"Rank":35,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage 
(GB, fp32)":4.99,"Average":75.97,"AmazonCounterfactualClassification (en)":75.85,"AmazonPolarityClassification":92.42,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":87.79,"EmotionClassification":51.52,"ImdbClassification":92.85,"MassiveIntentClassification (en)":77.56,"MassiveScenarioClassification (en)":80.53,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.49,"ToxicConversationsClassification":70.91,"TweetSentimentExtractionClassification":59.94} -{"level_0":35,"index":150,"Rank":36,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.85,"AmazonPolarityClassification":92.42,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":87.79,"EmotionClassification":51.52,"ImdbClassification":92.85,"MassiveIntentClassification (en)":77.56,"MassiveScenarioClassification (en)":80.53,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.49,"ToxicConversationsClassification":70.91,"TweetSentimentExtractionClassification":59.94} -{"level_0":36,"index":114,"Rank":37,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.85,"AmazonPolarityClassification":92.42,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":87.79,"EmotionClassification":51.52,"ImdbClassification":92.85,"MassiveIntentClassification (en)":77.56,"MassiveScenarioClassification (en)":80.53,"MTOPDomainClassification (en)":94.59,"MTOPIntentClassification (en)":79.49,"ToxicConversationsClassification":70.91,"TweetSentimentExtractionClassification":59.94} -{"level_0":37,"index":119,"Rank":38,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.76,"AmazonPolarityClassification":93.3,"AmazonReviewsClassification (en)":49.99,"Banking77Classification":86.4,"EmotionClassification":55.07,"ImdbClassification":90.15,"MassiveIntentClassification (en)":76.01,"MassiveScenarioClassification (en)":79.33,"MTOPDomainClassification (en)":95.29,"MTOPIntentClassification (en)":79.57,"ToxicConversationsClassification":69.44,"TweetSentimentExtractionClassification":61.27} -{"level_0":38,"index":125,"Rank":39,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.97,"AmazonCounterfactualClassification (en)":75.76,"AmazonPolarityClassification":93.3,"AmazonReviewsClassification (en)":49.99,"Banking77Classification":86.4,"EmotionClassification":55.07,"ImdbClassification":90.15,"MassiveIntentClassification (en)":76.01,"MassiveScenarioClassification (en)":79.33,"MTOPDomainClassification (en)":95.29,"MTOPIntentClassification (en)":79.57,"ToxicConversationsClassification":69.44,"TweetSentimentExtractionClassification":61.27} -{"level_0":39,"index":62,"Rank":40,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":75.92,"AmazonCounterfactualClassification (en)":79.94,"AmazonPolarityClassification":86.07,"AmazonReviewsClassification (en)":46.84,"Banking77Classification":88.05,"EmotionClassification":51.2,"ImdbClassification":82.94,"MassiveIntentClassification (en)":79.8,"MassiveScenarioClassification (en)":81.52,"MTOPDomainClassification (en)":96.14,"MTOPIntentClassification 
(en)":86.11,"ToxicConversationsClassification":70.59,"TweetSentimentExtractionClassification":61.9} -{"level_0":40,"index":194,"Rank":41,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.64,"AmazonCounterfactualClassification (en)":75.04,"AmazonPolarityClassification":93.84,"AmazonReviewsClassification (en)":49.18,"Banking77Classification":87.82,"EmotionClassification":50.88,"ImdbClassification":92.83,"MassiveIntentClassification (en)":76.24,"MassiveScenarioClassification (en)":79.95,"MTOPDomainClassification (en)":93.95,"MTOPIntentClassification (en)":76.79,"ToxicConversationsClassification":71.48,"TweetSentimentExtractionClassification":59.71} -{"level_0":41,"index":133,"Rank":42,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.64,"AmazonCounterfactualClassification (en)":75.04,"AmazonPolarityClassification":93.84,"AmazonReviewsClassification (en)":49.18,"Banking77Classification":87.82,"EmotionClassification":50.88,"ImdbClassification":92.83,"MassiveIntentClassification (en)":76.24,"MassiveScenarioClassification (en)":79.95,"MTOPDomainClassification (en)":93.95,"MTOPIntentClassification (en)":76.79,"ToxicConversationsClassification":71.48,"TweetSentimentExtractionClassification":59.71} -{"level_0":42,"index":111,"Rank":43,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.58,"AmazonCounterfactualClassification (en)":75.55,"AmazonPolarityClassification":92.84,"AmazonReviewsClassification (en)":48.29,"Banking77Classification":87.69,"EmotionClassification":51.75,"ImdbClassification":92.78,"MassiveIntentClassification (en)":76.5,"MassiveScenarioClassification (en)":79.75,"MTOPDomainClassification (en)":94.02,"MTOPIntentClassification (en)":76.92,"ToxicConversationsClassification":71.09,"TweetSentimentExtractionClassification":59.75} -{"level_0":43,"index":108,"Rank":44,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.58,"AmazonCounterfactualClassification (en)":75.55,"AmazonPolarityClassification":92.84,"AmazonReviewsClassification (en)":48.29,"Banking77Classification":87.69,"EmotionClassification":51.75,"ImdbClassification":92.78,"MassiveIntentClassification (en)":76.5,"MassiveScenarioClassification (en)":79.75,"MTOPDomainClassification (en)":94.02,"MTOPIntentClassification (en)":76.92,"ToxicConversationsClassification":71.09,"TweetSentimentExtractionClassification":59.75} -{"level_0":44,"index":165,"Rank":45,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.58,"AmazonCounterfactualClassification (en)":75.55,"AmazonPolarityClassification":92.84,"AmazonReviewsClassification (en)":48.29,"Banking77Classification":87.69,"EmotionClassification":51.75,"ImdbClassification":92.78,"MassiveIntentClassification (en)":76.5,"MassiveScenarioClassification (en)":79.75,"MTOPDomainClassification (en)":94.02,"MTOPIntentClassification (en)":76.92,"ToxicConversationsClassification":71.09,"TweetSentimentExtractionClassification":59.75} -{"level_0":45,"index":20,"Rank":46,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification 
(en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"level_0":46,"index":181,"Rank":47,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"level_0":47,"index":180,"Rank":48,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"level_0":48,"index":182,"Rank":49,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"level_0":49,"index":120,"Rank":50,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} -{"level_0":50,"index":179,"Rank":51,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.53,"AmazonCounterfactualClassification (en)":76.15,"AmazonPolarityClassification":93.39,"AmazonReviewsClassification (en)":48.85,"Banking77Classification":86.95,"EmotionClassification":51.9,"ImdbClassification":90.81,"MassiveIntentClassification (en)":76.15,"MassiveScenarioClassification (en)":80.16,"MTOPDomainClassification (en)":94.17,"MTOPIntentClassification (en)":76.91,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":59.38} 
-{"level_0":51,"index":283,"Rank":52,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.45,"AmazonCounterfactualClassification (en)":78.93,"AmazonPolarityClassification":92.85,"AmazonReviewsClassification (en)":48.7,"Banking77Classification":85.69,"EmotionClassification":51.58,"ImdbClassification":87.67,"MassiveIntentClassification (en)":74.64,"MassiveScenarioClassification (en)":79.79,"MTOPDomainClassification (en)":95.36,"MTOPIntentClassification (en)":75.07,"ToxicConversationsClassification":72.92,"TweetSentimentExtractionClassification":62.22} -{"level_0":52,"index":137,"Rank":53,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.28,"AmazonCounterfactualClassification (en)":77.19,"AmazonPolarityClassification":93.26,"AmazonReviewsClassification (en)":49.61,"Banking77Classification":84.73,"EmotionClassification":54.47,"ImdbClassification":91.31,"MassiveIntentClassification (en)":73.88,"MassiveScenarioClassification (en)":78.28,"MTOPDomainClassification (en)":93.5,"MTOPIntentClassification (en)":71.06,"ToxicConversationsClassification":72.99,"TweetSentimentExtractionClassification":63.07} -{"level_0":53,"index":151,"Rank":54,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.28,"AmazonCounterfactualClassification (en)":77.19,"AmazonPolarityClassification":93.26,"AmazonReviewsClassification (en)":49.61,"Banking77Classification":84.73,"EmotionClassification":54.47,"ImdbClassification":91.31,"MassiveIntentClassification (en)":73.88,"MassiveScenarioClassification (en)":78.28,"MTOPDomainClassification (en)":93.5,"MTOPIntentClassification (en)":71.06,"ToxicConversationsClassification":72.99,"TweetSentimentExtractionClassification":63.07} -{"level_0":54,"index":93,"Rank":55,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.24,"AmazonCounterfactualClassification (en)":79.22,"AmazonPolarityClassification":93.75,"AmazonReviewsClassification (en)":48.61,"Banking77Classification":84.55,"EmotionClassification":49.45,"ImdbClassification":91.69,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":78.11,"MTOPDomainClassification (en)":94.62,"MTOPIntentClassification (en)":77.14,"ToxicConversationsClassification":70.9,"TweetSentimentExtractionClassification":60.94} -{"level_0":55,"index":155,"Rank":56,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":75.24,"AmazonCounterfactualClassification (en)":79.22,"AmazonPolarityClassification":93.75,"AmazonReviewsClassification (en)":48.61,"Banking77Classification":84.55,"EmotionClassification":49.45,"ImdbClassification":91.69,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":78.11,"MTOPDomainClassification (en)":94.62,"MTOPIntentClassification (en)":77.14,"ToxicConversationsClassification":70.9,"TweetSentimentExtractionClassification":60.94} -{"level_0":56,"index":261,"Rank":57,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.16,"AmazonCounterfactualClassification (en)":75.18,"AmazonPolarityClassification":93.07,"AmazonReviewsClassification (en)":48.42,"Banking77Classification":88.03,"EmotionClassification":51.93,"ImdbClassification":91.91,"MassiveIntentClassification (en)":75.91,"MassiveScenarioClassification 
(en)":79.43,"MTOPDomainClassification (en)":94.33,"MTOPIntentClassification (en)":76.61,"ToxicConversationsClassification":67.91,"TweetSentimentExtractionClassification":59.22} -{"level_0":57,"index":197,"Rank":58,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.16,"AmazonCounterfactualClassification (en)":75.18,"AmazonPolarityClassification":93.07,"AmazonReviewsClassification (en)":48.42,"Banking77Classification":88.03,"EmotionClassification":51.93,"ImdbClassification":91.91,"MassiveIntentClassification (en)":75.91,"MassiveScenarioClassification (en)":79.43,"MTOPDomainClassification (en)":94.33,"MTOPIntentClassification (en)":76.61,"ToxicConversationsClassification":67.91,"TweetSentimentExtractionClassification":59.22} -{"level_0":58,"index":198,"Rank":59,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.98,"AmazonCounterfactualClassification (en)":76.16,"AmazonPolarityClassification":92.95,"AmazonReviewsClassification (en)":48.21,"Banking77Classification":86.35,"EmotionClassification":51.27,"ImdbClassification":89.72,"MassiveIntentClassification (en)":75.16,"MassiveScenarioClassification (en)":78.93,"MTOPDomainClassification (en)":93.95,"MTOPIntentClassification (en)":75.26,"ToxicConversationsClassification":72.12,"TweetSentimentExtractionClassification":59.67} -{"level_0":59,"index":160,"Rank":60,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.81,"AmazonCounterfactualClassification (en)":79.06,"AmazonPolarityClassification":93.49,"AmazonReviewsClassification (en)":47.56,"Banking77Classification":84.73,"EmotionClassification":46.5,"ImdbClassification":90.23,"MassiveIntentClassification (en)":73.76,"MassiveScenarioClassification (en)":77.51,"MTOPDomainClassification (en)":93.67,"MTOPIntentClassification (en)":77.9,"ToxicConversationsClassification":71.32,"TweetSentimentExtractionClassification":61.98} -{"level_0":60,"index":8,"Rank":61,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.79,"AmazonCounterfactualClassification (en)":71.43,"AmazonPolarityClassification":96.41,"AmazonReviewsClassification (en)":57.06,"Banking77Classification":81.64,"EmotionClassification":48.29,"ImdbClassification":95.49,"MassiveIntentClassification (en)":71.29,"MassiveScenarioClassification (en)":76.74,"MTOPDomainClassification (en)":96.3,"MTOPIntentClassification (en)":67.93,"ToxicConversationsClassification":75.45,"TweetSentimentExtractionClassification":59.44} -{"level_0":61,"index":53,"Rank":62,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.5,"AmazonCounterfactualClassification (en)":72.39,"AmazonPolarityClassification":93.71,"AmazonReviewsClassification (en)":50.85,"Banking77Classification":85.41,"EmotionClassification":55.93,"ImdbClassification":93.57,"MassiveIntentClassification (en)":73.88,"MassiveScenarioClassification (en)":77.42,"MTOPDomainClassification (en)":94.25,"MTOPIntentClassification (en)":67.51,"ToxicConversationsClassification":67.3,"TweetSentimentExtractionClassification":61.76} -{"level_0":62,"index":33,"Rank":63,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.31,"AmazonCounterfactualClassification (en)":78.63,"AmazonPolarityClassification":94.8,"AmazonReviewsClassification 
(en)":51.02,"Banking77Classification":79.7,"EmotionClassification":52.74,"ImdbClassification":92.17,"MassiveIntentClassification (en)":70.01,"MassiveScenarioClassification (en)":76.34,"MTOPDomainClassification (en)":93.63,"MTOPIntentClassification (en)":64.93,"ToxicConversationsClassification":73.04,"TweetSentimentExtractionClassification":64.72} -{"level_0":63,"index":24,"Rank":64,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":74.14,"AmazonCounterfactualClassification (en)":73.79,"AmazonPolarityClassification":92.75,"AmazonReviewsClassification (en)":46.99,"Banking77Classification":85.74,"EmotionClassification":47.84,"ImdbClassification":90.61,"MassiveIntentClassification (en)":74.81,"MassiveScenarioClassification (en)":78.7,"MTOPDomainClassification (en)":93.36,"MTOPIntentClassification (en)":74.75,"ToxicConversationsClassification":69.89,"TweetSentimentExtractionClassification":60.51} -{"level_0":64,"index":193,"Rank":65,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.14,"AmazonCounterfactualClassification (en)":74.76,"AmazonPolarityClassification":93.26,"AmazonReviewsClassification (en)":46.16,"Banking77Classification":86.65,"EmotionClassification":49.32,"ImdbClassification":90.4,"MassiveIntentClassification (en)":73.87,"MassiveScenarioClassification (en)":78.17,"MTOPDomainClassification (en)":93.1,"MTOPIntentClassification (en)":73.24,"ToxicConversationsClassification":71.53,"TweetSentimentExtractionClassification":59.25} -{"level_0":65,"index":26,"Rank":66,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"level_0":66,"index":28,"Rank":67,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"level_0":67,"index":206,"Rank":68,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"level_0":68,"index":129,"Rank":69,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, 
fp32)":2.04,"Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"level_0":69,"index":29,"Rank":70,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"level_0":70,"index":27,"Rank":71,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"AmazonCounterfactualClassification (en)":76.85,"AmazonPolarityClassification":91.52,"AmazonReviewsClassification (en)":47.36,"Banking77Classification":84.54,"EmotionClassification":48.35,"ImdbClassification":85.56,"MassiveIntentClassification (en)":73.72,"MassiveScenarioClassification (en)":77.8,"MTOPDomainClassification (en)":93.41,"MTOPIntentClassification (en)":77.25,"ToxicConversationsClassification":71.5,"TweetSentimentExtractionClassification":61.54} -{"level_0":71,"index":65,"Rank":72,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":74.07,"AmazonCounterfactualClassification (en)":76.94,"AmazonPolarityClassification":85.29,"AmazonReviewsClassification (en)":47.09,"Banking77Classification":86.16,"EmotionClassification":48.88,"ImdbClassification":77.95,"MassiveIntentClassification (en)":76.65,"MassiveScenarioClassification (en)":79.99,"MTOPDomainClassification (en)":95.48,"MTOPIntentClassification (en)":82.84,"ToxicConversationsClassification":70.71,"TweetSentimentExtractionClassification":60.9} -{"level_0":72,"index":170,"Rank":73,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.96,"AmazonCounterfactualClassification (en)":70.85,"AmazonPolarityClassification":91.81,"AmazonReviewsClassification (en)":48.94,"Banking77Classification":84.61,"EmotionClassification":54.9,"ImdbClassification":93.14,"MassiveIntentClassification (en)":73.49,"MassiveScenarioClassification (en)":77.38,"MTOPDomainClassification (en)":93.64,"MTOPIntentClassification (en)":66.01,"ToxicConversationsClassification":71.19,"TweetSentimentExtractionClassification":61.55} -{"level_0":73,"index":148,"Rank":74,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":73.86,"AmazonCounterfactualClassification (en)":88.13,"AmazonPolarityClassification":91.53,"AmazonReviewsClassification (en)":47.86,"Banking77Classification":78.51,"EmotionClassification":52.73,"ImdbClassification":88.32,"MassiveIntentClassification (en)":68.9,"MassiveScenarioClassification (en)":73.35,"MTOPDomainClassification (en)":93.89,"MTOPIntentClassification 
(en)":67.98,"ToxicConversationsClassification":71.05,"TweetSentimentExtractionClassification":64.13} -{"level_0":74,"index":140,"Rank":75,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":73.84,"AmazonCounterfactualClassification (en)":77.78,"AmazonPolarityClassification":92.81,"AmazonReviewsClassification (en)":46.71,"Banking77Classification":83.53,"EmotionClassification":46.95,"ImdbClassification":86.15,"MassiveIntentClassification (en)":73.04,"MassiveScenarioClassification (en)":77.65,"MTOPDomainClassification (en)":93.69,"MTOPIntentClassification (en)":75.31,"ToxicConversationsClassification":72.11,"TweetSentimentExtractionClassification":60.39} -{"level_0":75,"index":153,"Rank":76,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":73.84,"AmazonCounterfactualClassification (en)":77.78,"AmazonPolarityClassification":92.81,"AmazonReviewsClassification (en)":46.71,"Banking77Classification":83.53,"EmotionClassification":46.95,"ImdbClassification":86.15,"MassiveIntentClassification (en)":73.04,"MassiveScenarioClassification (en)":77.65,"MTOPDomainClassification (en)":93.69,"MTOPIntentClassification (en)":75.31,"ToxicConversationsClassification":72.11,"TweetSentimentExtractionClassification":60.39} -{"level_0":76,"index":207,"Rank":77,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.65,"AmazonCounterfactualClassification (en)":78.67,"AmazonPolarityClassification":90.41,"AmazonReviewsClassification (en)":47.81,"Banking77Classification":83.82,"EmotionClassification":48.81,"ImdbClassification":83.78,"MassiveIntentClassification (en)":72.76,"MassiveScenarioClassification (en)":76.7,"MTOPDomainClassification (en)":93.47,"MTOPIntentClassification (en)":75.27,"ToxicConversationsClassification":71.39,"TweetSentimentExtractionClassification":60.87} -{"level_0":77,"index":213,"Rank":78,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.55,"AmazonCounterfactualClassification (en)":75.21,"AmazonPolarityClassification":91.81,"AmazonReviewsClassification (en)":47.16,"Banking77Classification":84.25,"EmotionClassification":47.99,"ImdbClassification":85.31,"MassiveIntentClassification (en)":73.46,"MassiveScenarioClassification (en)":77.08,"MTOPDomainClassification (en)":93.01,"MTOPIntentClassification (en)":75.03,"ToxicConversationsClassification":71.42,"TweetSentimentExtractionClassification":60.92} -{"level_0":78,"index":202,"Rank":79,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.52,"AmazonCounterfactualClassification (en)":74.19,"AmazonPolarityClassification":91.89,"AmazonReviewsClassification (en)":46.72,"Banking77Classification":85.1,"EmotionClassification":46.84,"ImdbClassification":89.35,"MassiveIntentClassification (en)":74.01,"MassiveScenarioClassification (en)":77.96,"MTOPDomainClassification (en)":92.67,"MTOPIntentClassification (en)":74.03,"ToxicConversationsClassification":69.5,"TweetSentimentExtractionClassification":60.02} -{"level_0":79,"index":175,"Rank":80,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":73.45,"AmazonCounterfactualClassification (en)":74.73,"AmazonPolarityClassification":88.54,"AmazonReviewsClassification 
(en)":45.26,"Banking77Classification":84.01,"EmotionClassification":48.77,"ImdbClassification":79.44,"MassiveIntentClassification (en)":71.93,"MassiveScenarioClassification (en)":74.49,"MTOPDomainClassification (en)":95.68,"MTOPIntentClassification (en)":83.15,"ToxicConversationsClassification":73.35,"TweetSentimentExtractionClassification":62.06} -{"level_0":80,"index":135,"Rank":81,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.45,"AmazonCounterfactualClassification (en)":74.73,"AmazonPolarityClassification":88.54,"AmazonReviewsClassification (en)":45.26,"Banking77Classification":84.01,"EmotionClassification":48.77,"ImdbClassification":79.44,"MassiveIntentClassification (en)":71.93,"MassiveScenarioClassification (en)":74.49,"MTOPDomainClassification (en)":95.68,"MTOPIntentClassification (en)":83.15,"ToxicConversationsClassification":73.35,"TweetSentimentExtractionClassification":62.06} -{"level_0":81,"index":246,"Rank":82,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":73.42,"AmazonCounterfactualClassification (en)":77.07,"AmazonPolarityClassification":92.79,"AmazonReviewsClassification (en)":48.93,"Banking77Classification":82.31,"EmotionClassification":48.57,"ImdbClassification":90.23,"MassiveIntentClassification (en)":73.44,"MassiveScenarioClassification (en)":74.82,"MTOPDomainClassification (en)":92.49,"MTOPIntentClassification (en)":68.33,"ToxicConversationsClassification":70.04,"TweetSentimentExtractionClassification":62.01} -{"level_0":82,"index":253,"Rank":83,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.33,"AmazonCounterfactualClassification (en)":72.63,"AmazonPolarityClassification":92.52,"AmazonReviewsClassification (en)":49.07,"Banking77Classification":86.06,"EmotionClassification":47.88,"ImdbClassification":88.46,"MassiveIntentClassification (en)":72.62,"MassiveScenarioClassification (en)":76.77,"MTOPDomainClassification (en)":93.51,"MTOPIntentClassification (en)":73.25,"ToxicConversationsClassification":70.56,"TweetSentimentExtractionClassification":56.58} -{"level_0":83,"index":211,"Rank":84,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":73.24,"AmazonCounterfactualClassification (en)":74.27,"AmazonPolarityClassification":91.89,"AmazonReviewsClassification (en)":46.97,"Banking77Classification":84.15,"EmotionClassification":47.73,"ImdbClassification":85.47,"MassiveIntentClassification (en)":73.07,"MassiveScenarioClassification (en)":76.82,"MTOPDomainClassification (en)":92.62,"MTOPIntentClassification (en)":74.27,"ToxicConversationsClassification":71.25,"TweetSentimentExtractionClassification":60.4} -{"level_0":84,"index":284,"Rank":85,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.21,"AmazonCounterfactualClassification (en)":76.42,"AmazonPolarityClassification":90.84,"AmazonReviewsClassification (en)":45.73,"Banking77Classification":83.01,"EmotionClassification":50.63,"ImdbClassification":83.66,"MassiveIntentClassification (en)":72.86,"MassiveScenarioClassification (en)":76.84,"MTOPDomainClassification (en)":93.91,"MTOPIntentClassification (en)":70.98,"ToxicConversationsClassification":71.91,"TweetSentimentExtractionClassification":61.72} -{"level_0":85,"index":154,"Rank":86,"Model":"e5-large<\/a>","Model Size (Million 
Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":73.14,"AmazonCounterfactualClassification (en)":77.69,"AmazonPolarityClassification":90.05,"AmazonReviewsClassification (en)":43.02,"Banking77Classification":84.14,"EmotionClassification":48.05,"ImdbClassification":82.11,"MassiveIntentClassification (en)":73.22,"MassiveScenarioClassification (en)":77.39,"MTOPDomainClassification (en)":93.86,"MTOPIntentClassification (en)":76.4,"ToxicConversationsClassification":70.56,"TweetSentimentExtractionClassification":61.21} -{"level_0":86,"index":124,"Rank":87,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":73.13,"AmazonCounterfactualClassification (en)":75.63,"AmazonPolarityClassification":91.01,"AmazonReviewsClassification (en)":46.99,"Banking77Classification":81.93,"EmotionClassification":50.16,"ImdbClassification":87.84,"MassiveIntentClassification (en)":71.08,"MassiveScenarioClassification (en)":76.64,"MTOPDomainClassification (en)":93.36,"MTOPIntentClassification (en)":66.58,"ToxicConversationsClassification":72.6,"TweetSentimentExtractionClassification":63.71} -{"level_0":87,"index":149,"Rank":88,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":73.12,"AmazonCounterfactualClassification (en)":85.09,"AmazonPolarityClassification":86.54,"AmazonReviewsClassification (en)":42.96,"Banking77Classification":82.66,"EmotionClassification":53.24,"ImdbClassification":79.79,"MassiveIntentClassification (en)":71.48,"MassiveScenarioClassification (en)":76.47,"MTOPDomainClassification (en)":95.07,"MTOPIntentClassification (en)":72.26,"ToxicConversationsClassification":70.33,"TweetSentimentExtractionClassification":61.58} -{"level_0":88,"index":159,"Rank":89,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":73.02,"AmazonCounterfactualClassification (en)":78.97,"AmazonPolarityClassification":90.64,"AmazonReviewsClassification (en)":44.55,"Banking77Classification":82.74,"EmotionClassification":45.18,"ImdbClassification":85.46,"MassiveIntentClassification (en)":72.11,"MassiveScenarioClassification (en)":77.08,"MTOPDomainClassification (en)":93.13,"MTOPIntentClassification (en)":75.27,"ToxicConversationsClassification":69.78,"TweetSentimentExtractionClassification":61.28} -{"level_0":89,"index":252,"Rank":90,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.01,"AmazonCounterfactualClassification (en)":74.18,"AmazonPolarityClassification":91.77,"AmazonReviewsClassification (en)":48.96,"Banking77Classification":85.07,"EmotionClassification":48.65,"ImdbClassification":85.95,"MassiveIntentClassification (en)":71.47,"MassiveScenarioClassification (en)":76.38,"MTOPDomainClassification (en)":93.03,"MTOPIntentClassification (en)":72.04,"ToxicConversationsClassification":71.61,"TweetSentimentExtractionClassification":57.01} -{"level_0":90,"index":158,"Rank":91,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.94,"AmazonCounterfactualClassification (en)":77.6,"AmazonPolarityClassification":91.27,"AmazonReviewsClassification (en)":45.88,"Banking77Classification":81.64,"EmotionClassification":47.06,"ImdbClassification":86.0,"MassiveIntentClassification (en)":71.62,"MassiveScenarioClassification (en)":76.36,"MTOPDomainClassification (en)":92.7,"MTOPIntentClassification 
(en)":72.56,"ToxicConversationsClassification":71.1,"TweetSentimentExtractionClassification":61.46} -{"level_0":91,"index":245,"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":72.84,"AmazonCounterfactualClassification (en)":76.01,"AmazonPolarityClassification":93.17,"AmazonReviewsClassification (en)":48.18,"Banking77Classification":80.88,"EmotionClassification":51.95,"ImdbClassification":87.54,"MassiveIntentClassification (en)":72.09,"MassiveScenarioClassification (en)":73.26,"MTOPDomainClassification (en)":90.73,"MTOPIntentClassification (en)":68.15,"ToxicConversationsClassification":70.95,"TweetSentimentExtractionClassification":61.21} -{"level_0":92,"index":116,"Rank":93,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.72,"AmazonCounterfactualClassification (en)":72.9,"AmazonPolarityClassification":87.19,"AmazonReviewsClassification (en)":42.56,"Banking77Classification":84.24,"EmotionClassification":52.06,"ImdbClassification":78.54,"MassiveIntentClassification (en)":73.18,"MassiveScenarioClassification (en)":76.68,"MTOPDomainClassification (en)":94.78,"MTOPIntentClassification (en)":77.72,"ToxicConversationsClassification":72.9,"TweetSentimentExtractionClassification":59.85} -{"level_0":93,"index":152,"Rank":94,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.63,"AmazonCounterfactualClassification (en)":79.72,"AmazonPolarityClassification":87.96,"AmazonReviewsClassification (en)":42.65,"Banking77Classification":83.33,"EmotionClassification":49.44,"ImdbClassification":75.96,"MassiveIntentClassification (en)":72.25,"MassiveScenarioClassification (en)":76.76,"MTOPDomainClassification (en)":93.21,"MTOPIntentClassification (en)":74.77,"ToxicConversationsClassification":74.12,"TweetSentimentExtractionClassification":61.38} -{"level_0":94,"index":199,"Rank":95,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.62,"AmazonCounterfactualClassification (en)":75.39,"AmazonPolarityClassification":90.73,"AmazonReviewsClassification (en)":45.49,"Banking77Classification":84.3,"EmotionClassification":46.7,"ImdbClassification":83.15,"MassiveIntentClassification (en)":73.18,"MassiveScenarioClassification (en)":77.54,"MTOPDomainClassification (en)":92.05,"MTOPIntentClassification (en)":72.49,"ToxicConversationsClassification":70.92,"TweetSentimentExtractionClassification":59.49} -{"level_0":95,"index":147,"Rank":96,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.36,"AmazonCounterfactualClassification (en)":86.21,"AmazonPolarityClassification":88.36,"AmazonReviewsClassification (en)":44.64,"Banking77Classification":77.04,"EmotionClassification":51.76,"ImdbClassification":81.17,"MassiveIntentClassification (en)":67.48,"MassiveScenarioClassification (en)":72.59,"MTOPDomainClassification (en)":93.72,"MTOPIntentClassification (en)":70.26,"ToxicConversationsClassification":71.82,"TweetSentimentExtractionClassification":63.31} -{"level_0":96,"index":254,"Rank":97,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.31,"AmazonCounterfactualClassification (en)":73.22,"AmazonPolarityClassification":91.82,"AmazonReviewsClassification 
(en)":48.03,"Banking77Classification":84.08,"EmotionClassification":46.56,"ImdbClassification":86.8,"MassiveIntentClassification (en)":70.35,"MassiveScenarioClassification (en)":75.64,"MTOPDomainClassification (en)":93.05,"MTOPIntentClassification (en)":69.65,"ToxicConversationsClassification":70.33,"TweetSentimentExtractionClassification":58.22} -{"level_0":97,"index":244,"Rank":98,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":72.31,"AmazonCounterfactualClassification (en)":75.51,"AmazonPolarityClassification":92.87,"AmazonReviewsClassification (en)":47.12,"Banking77Classification":78.46,"EmotionClassification":51.74,"ImdbClassification":87.01,"MassiveIntentClassification (en)":71.78,"MassiveScenarioClassification (en)":73.16,"MTOPDomainClassification (en)":90.99,"MTOPIntentClassification (en)":64.98,"ToxicConversationsClassification":71.73,"TweetSentimentExtractionClassification":62.33} -{"level_0":98,"index":185,"Rank":99,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.25,"AmazonCounterfactualClassification (en)":66.94,"AmazonPolarityClassification":94.7,"AmazonReviewsClassification (en)":51.59,"Banking77Classification":80.34,"EmotionClassification":47.83,"ImdbClassification":89.66,"MassiveIntentClassification (en)":70.27,"MassiveScenarioClassification (en)":74.51,"MTOPDomainClassification (en)":93.77,"MTOPIntentClassification (en)":67.8,"ToxicConversationsClassification":70.85,"TweetSentimentExtractionClassification":58.71} -{"level_0":99,"index":66,"Rank":100,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":72.21,"AmazonCounterfactualClassification (en)":77.42,"AmazonPolarityClassification":82.05,"AmazonReviewsClassification (en)":40.81,"Banking77Classification":86.01,"EmotionClassification":48.38,"ImdbClassification":75.33,"MassiveIntentClassification (en)":75.58,"MassiveScenarioClassification (en)":79.16,"MTOPDomainClassification (en)":94.09,"MTOPIntentClassification (en)":77.05,"ToxicConversationsClassification":69.92,"TweetSentimentExtractionClassification":60.76} -{"level_0":100,"index":169,"Rank":101,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.13,"AmazonCounterfactualClassification (en)":73.12,"AmazonPolarityClassification":88.89,"AmazonReviewsClassification (en)":43.2,"Banking77Classification":85.36,"EmotionClassification":48.77,"ImdbClassification":78.46,"MassiveIntentClassification (en)":72.04,"MassiveScenarioClassification (en)":76.86,"MTOPDomainClassification (en)":93.43,"MTOPIntentClassification (en)":71.73,"ToxicConversationsClassification":71.58,"TweetSentimentExtractionClassification":62.14} -{"level_0":101,"index":210,"Rank":102,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":72.1,"AmazonCounterfactualClassification (en)":72.94,"AmazonPolarityClassification":91.35,"AmazonReviewsClassification (en)":45.73,"Banking77Classification":83.69,"EmotionClassification":45.88,"ImdbClassification":83.99,"MassiveIntentClassification (en)":71.76,"MassiveScenarioClassification (en)":75.67,"MTOPDomainClassification (en)":91.68,"MTOPIntentClassification (en)":72.47,"ToxicConversationsClassification":70.87,"TweetSentimentExtractionClassification":59.2} -{"level_0":102,"index":282,"Rank":103,"Model":"text-embedding-3-large-256<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.97,"AmazonCounterfactualClassification (en)":73.96,"AmazonPolarityClassification":91.32,"AmazonReviewsClassification (en)":46.03,"Banking77Classification":83.19,"EmotionClassification":45.8,"ImdbClassification":85.93,"MassiveIntentClassification (en)":71.12,"MassiveScenarioClassification (en)":75.56,"MTOPDomainClassification (en)":92.76,"MTOPIntentClassification (en)":70.45,"ToxicConversationsClassification":68.52,"TweetSentimentExtractionClassification":58.98} -{"level_0":103,"index":167,"Rank":104,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.91,"AmazonCounterfactualClassification (en)":78.9,"AmazonPolarityClassification":87.98,"AmazonReviewsClassification (en)":42.58,"Banking77Classification":84.16,"EmotionClassification":47.88,"ImdbClassification":77.7,"MassiveIntentClassification (en)":70.39,"MassiveScenarioClassification (en)":77.19,"MTOPDomainClassification (en)":92.34,"MTOPIntentClassification (en)":70.63,"ToxicConversationsClassification":72.13,"TweetSentimentExtractionClassification":61.04} -{"level_0":104,"index":63,"Rank":105,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":71.88,"AmazonCounterfactualClassification (en)":75.7,"AmazonPolarityClassification":80.68,"AmazonReviewsClassification (en)":40.0,"Banking77Classification":84.77,"EmotionClassification":47.08,"ImdbClassification":75.19,"MassiveIntentClassification (en)":75.01,"MassiveScenarioClassification (en)":79.16,"MTOPDomainClassification (en)":94.47,"MTOPIntentClassification (en)":81.09,"ToxicConversationsClassification":71.85,"TweetSentimentExtractionClassification":57.61} -{"level_0":105,"index":270,"Rank":106,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.7,"AmazonCounterfactualClassification (en)":72.88,"AmazonPolarityClassification":91.03,"AmazonReviewsClassification (en)":46.94,"Banking77Classification":83.47,"EmotionClassification":45.8,"ImdbClassification":85.01,"MassiveIntentClassification (en)":70.23,"MassiveScenarioClassification (en)":75.29,"MTOPDomainClassification (en)":92.51,"MTOPIntentClassification (en)":69.62,"ToxicConversationsClassification":69.8,"TweetSentimentExtractionClassification":57.82} -{"level_0":106,"index":157,"Rank":107,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":71.67,"AmazonCounterfactualClassification (en)":76.22,"AmazonPolarityClassification":87.53,"AmazonReviewsClassification (en)":42.61,"Banking77Classification":81.87,"EmotionClassification":46.86,"ImdbClassification":75.55,"MassiveIntentClassification (en)":72.22,"MassiveScenarioClassification (en)":75.78,"MTOPDomainClassification (en)":92.05,"MTOPIntentClassification (en)":73.24,"ToxicConversationsClassification":72.76,"TweetSentimentExtractionClassification":63.31} -{"level_0":107,"index":61,"Rank":108,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":71.57,"AmazonCounterfactualClassification (en)":76.91,"AmazonPolarityClassification":79.05,"AmazonReviewsClassification (en)":40.08,"Banking77Classification":84.65,"EmotionClassification":46.58,"ImdbClassification":75.68,"MassiveIntentClassification (en)":73.84,"MassiveScenarioClassification (en)":79.17,"MTOPDomainClassification (en)":94.33,"MTOPIntentClassification 
(en)":79.54,"ToxicConversationsClassification":71.81,"TweetSentimentExtractionClassification":57.17} -{"level_0":108,"index":208,"Rank":109,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.17,"AmazonCounterfactualClassification (en)":76.99,"AmazonPolarityClassification":87.54,"AmazonReviewsClassification (en)":46.81,"Banking77Classification":81.09,"EmotionClassification":47.65,"ImdbClassification":86.32,"MassiveIntentClassification (en)":67.75,"MassiveScenarioClassification (en)":74.03,"MTOPDomainClassification (en)":92.36,"MTOPIntentClassification (en)":65.2,"ToxicConversationsClassification":72.96,"TweetSentimentExtractionClassification":55.28} -{"level_0":109,"index":281,"Rank":110,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.93,"AmazonCounterfactualClassification (en)":75.94,"AmazonPolarityClassification":86.72,"AmazonReviewsClassification (en)":44.78,"Banking77Classification":80.66,"EmotionClassification":48.74,"ImdbClassification":77.98,"MassiveIntentClassification (en)":70.15,"MassiveScenarioClassification (en)":75.33,"MTOPDomainClassification (en)":92.13,"MTOPIntentClassification (en)":64.68,"ToxicConversationsClassification":72.29,"TweetSentimentExtractionClassification":61.81} -{"level_0":110,"index":162,"Rank":111,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.74,"AmazonCounterfactualClassification (en)":73.79,"AmazonPolarityClassification":88.7,"AmazonReviewsClassification (en)":44.7,"Banking77Classification":79.42,"EmotionClassification":42.45,"ImdbClassification":80.82,"MassiveIntentClassification (en)":70.3,"MassiveScenarioClassification (en)":74.48,"MTOPDomainClassification (en)":91.07,"MTOPIntentClassification (en)":71.08,"ToxicConversationsClassification":69.39,"TweetSentimentExtractionClassification":62.62} -{"level_0":111,"index":112,"Rank":112,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.62,"AmazonCounterfactualClassification (en)":79.31,"AmazonPolarityClassification":76.66,"AmazonReviewsClassification (en)":35.28,"Banking77Classification":84.31,"EmotionClassification":55.61,"ImdbClassification":82.39,"MassiveIntentClassification (en)":69.24,"MassiveScenarioClassification (en)":74.11,"MTOPDomainClassification (en)":90.46,"MTOPIntentClassification (en)":66.0,"ToxicConversationsClassification":74.52,"TweetSentimentExtractionClassification":59.59} -{"level_0":112,"index":35,"Rank":113,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.57,"AmazonCounterfactualClassification (en)":70.03,"AmazonPolarityClassification":90.7,"AmazonReviewsClassification (en)":46.55,"Banking77Classification":78.12,"EmotionClassification":46.66,"ImdbClassification":85.64,"MassiveIntentClassification (en)":67.42,"MassiveScenarioClassification (en)":72.61,"MTOPDomainClassification (en)":91.86,"MTOPIntentClassification (en)":62.18,"ToxicConversationsClassification":70.66,"TweetSentimentExtractionClassification":64.46} -{"level_0":113,"index":184,"Rank":114,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.55,"AmazonCounterfactualClassification (en)":68.61,"AmazonPolarityClassification":93.38,"AmazonReviewsClassification 
(en)":50.64,"Banking77Classification":78.5,"EmotionClassification":46.37,"ImdbClassification":88.54,"MassiveIntentClassification (en)":67.24,"MassiveScenarioClassification (en)":72.98,"MTOPDomainClassification (en)":90.48,"MTOPIntentClassification (en)":59.82,"ToxicConversationsClassification":71.16,"TweetSentimentExtractionClassification":58.89} -{"level_0":114,"index":277,"Rank":115,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.44,"AmazonCounterfactualClassification (en)":76.4,"AmazonPolarityClassification":92.83,"AmazonReviewsClassification (en)":47.45,"Banking77Classification":68.04,"EmotionClassification":50.33,"ImdbClassification":89.38,"MassiveIntentClassification (en)":65.17,"MassiveScenarioClassification (en)":67.67,"MTOPDomainClassification (en)":89.89,"MTOPIntentClassification (en)":64.8,"ToxicConversationsClassification":70.0,"TweetSentimentExtractionClassification":63.35} -{"level_0":115,"index":107,"Rank":116,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.35,"AmazonCounterfactualClassification (en)":71.76,"AmazonPolarityClassification":86.61,"AmazonReviewsClassification (en)":42.61,"Banking77Classification":81.73,"EmotionClassification":44.71,"ImdbClassification":80.54,"MassiveIntentClassification (en)":70.13,"MassiveScenarioClassification (en)":74.86,"MTOPDomainClassification (en)":91.76,"MTOPIntentClassification (en)":69.9,"ToxicConversationsClassification":70.97,"TweetSentimentExtractionClassification":58.57} -{"level_0":116,"index":69,"Rank":117,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.25,"AmazonCounterfactualClassification (en)":72.1,"AmazonPolarityClassification":86.69,"AmazonReviewsClassification (en)":42.7,"Banking77Classification":81.92,"EmotionClassification":45.44,"ImdbClassification":80.8,"MassiveIntentClassification (en)":70.35,"MassiveScenarioClassification (en)":74.89,"MTOPDomainClassification (en)":92.12,"MTOPIntentClassification (en)":71.86,"ToxicConversationsClassification":65.46,"TweetSentimentExtractionClassification":58.66} -{"level_0":117,"index":42,"Rank":118,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":70.21,"AmazonCounterfactualClassification (en)":71.1,"AmazonPolarityClassification":86.69,"AmazonReviewsClassification (en)":45.51,"Banking77Classification":79.36,"EmotionClassification":48.79,"ImdbClassification":82.25,"MassiveIntentClassification (en)":71.52,"MassiveScenarioClassification (en)":73.87,"MTOPDomainClassification (en)":92.67,"MTOPIntentClassification (en)":69.77,"ToxicConversationsClassification":63.9,"TweetSentimentExtractionClassification":57.14} -{"level_0":118,"index":166,"Rank":119,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.17,"AmazonCounterfactualClassification (en)":69.94,"AmazonPolarityClassification":87.19,"AmazonReviewsClassification (en)":41.08,"Banking77Classification":82.89,"EmotionClassification":46.84,"ImdbClassification":74.45,"MassiveIntentClassification (en)":67.93,"MassiveScenarioClassification (en)":75.72,"MTOPDomainClassification (en)":92.18,"MTOPIntentClassification (en)":70.3,"ToxicConversationsClassification":72.01,"TweetSentimentExtractionClassification":61.51} -{"level_0":119,"index":84,"Rank":120,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":70.14,"AmazonCounterfactualClassification (en)":74.07,"AmazonPolarityClassification":82.31,"AmazonReviewsClassification (en)":41.58,"Banking77Classification":81.74,"EmotionClassification":49.92,"ImdbClassification":74.33,"MassiveIntentClassification (en)":70.0,"MassiveScenarioClassification (en)":75.03,"MTOPDomainClassification (en)":89.64,"MTOPIntentClassification (en)":70.68,"ToxicConversationsClassification":69.93,"TweetSentimentExtractionClassification":62.44} -{"level_0":120,"index":243,"Rank":121,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":69.81,"AmazonCounterfactualClassification (en)":75.82,"AmazonPolarityClassification":85.12,"AmazonReviewsClassification (en)":44.94,"Banking77Classification":76.48,"EmotionClassification":51.35,"ImdbClassification":77.34,"MassiveIntentClassification (en)":69.74,"MassiveScenarioClassification (en)":72.32,"MTOPDomainClassification (en)":90.34,"MTOPIntentClassification (en)":63.32,"ToxicConversationsClassification":68.2,"TweetSentimentExtractionClassification":62.71} -{"level_0":121,"index":209,"Rank":122,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":69.7,"AmazonCounterfactualClassification (en)":69.78,"AmazonPolarityClassification":88.74,"AmazonReviewsClassification (en)":43.11,"Banking77Classification":82.78,"EmotionClassification":42.92,"ImdbClassification":80.87,"MassiveIntentClassification (en)":69.34,"MassiveScenarioClassification (en)":74.21,"MTOPDomainClassification (en)":89.61,"MTOPIntentClassification (en)":68.9,"ToxicConversationsClassification":68.16,"TweetSentimentExtractionClassification":57.99} -{"level_0":122,"index":183,"Rank":123,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.65,"AmazonCounterfactualClassification (en)":70.12,"AmazonPolarityClassification":92.95,"AmazonReviewsClassification (en)":50.52,"Banking77Classification":75.59,"EmotionClassification":45.98,"ImdbClassification":90.22,"MassiveIntentClassification (en)":65.03,"MassiveScenarioClassification (en)":71.05,"MTOPDomainClassification (en)":88.63,"MTOPIntentClassification (en)":58.08,"ToxicConversationsClassification":69.33,"TweetSentimentExtractionClassification":58.25} -{"level_0":123,"index":176,"Rank":124,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.02,"AmazonCounterfactualClassification (en)":74.25,"AmazonPolarityClassification":78.31,"AmazonReviewsClassification (en)":38.32,"Banking77Classification":85.26,"EmotionClassification":46.58,"ImdbClassification":67.46,"MassiveIntentClassification (en)":70.76,"MassiveScenarioClassification (en)":73.82,"MTOPDomainClassification (en)":90.37,"MTOPIntentClassification (en)":71.97,"ToxicConversationsClassification":70.05,"TweetSentimentExtractionClassification":61.06} -{"level_0":124,"index":177,"Rank":125,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.82,"AmazonCounterfactualClassification (en)":71.36,"AmazonPolarityClassification":82.9,"AmazonReviewsClassification (en)":40.89,"Banking77Classification":78.25,"EmotionClassification":44.01,"ImdbClassification":73.64,"MassiveIntentClassification (en)":67.61,"MassiveScenarioClassification (en)":69.75,"MTOPDomainClassification (en)":93.96,"MTOPIntentClassification 
(en)":72.5,"ToxicConversationsClassification":71.54,"TweetSentimentExtractionClassification":59.4} -{"level_0":125,"index":136,"Rank":126,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.82,"AmazonCounterfactualClassification (en)":71.36,"AmazonPolarityClassification":82.9,"AmazonReviewsClassification (en)":40.89,"Banking77Classification":78.25,"EmotionClassification":44.01,"ImdbClassification":73.64,"MassiveIntentClassification (en)":67.61,"MassiveScenarioClassification (en)":69.75,"MTOPDomainClassification (en)":93.96,"MTOPIntentClassification (en)":72.5,"ToxicConversationsClassification":71.54,"TweetSentimentExtractionClassification":59.4} -{"level_0":126,"index":203,"Rank":127,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.55,"AmazonCounterfactualClassification (en)":70.72,"AmazonPolarityClassification":83.34,"AmazonReviewsClassification (en)":40.99,"Banking77Classification":81.25,"EmotionClassification":41.66,"ImdbClassification":74.81,"MassiveIntentClassification (en)":69.8,"MassiveScenarioClassification (en)":74.54,"MTOPDomainClassification (en)":91.22,"MTOPIntentClassification (en)":69.39,"ToxicConversationsClassification":67.44,"TweetSentimentExtractionClassification":57.48} -{"level_0":127,"index":100,"Rank":128,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.23,"AmazonCounterfactualClassification (en)":76.81,"AmazonPolarityClassification":82.83,"AmazonReviewsClassification (en)":38.93,"Banking77Classification":80.34,"EmotionClassification":46.54,"ImdbClassification":74.08,"MassiveIntentClassification (en)":66.92,"MassiveScenarioClassification (en)":72.75,"MTOPDomainClassification (en)":92.73,"MTOPIntentClassification (en)":65.18,"ToxicConversationsClassification":64.93,"TweetSentimentExtractionClassification":56.73} -{"level_0":128,"index":83,"Rank":129,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.13,"AmazonCounterfactualClassification (en)":69.22,"AmazonPolarityClassification":71.26,"AmazonReviewsClassification (en)":39.19,"Banking77Classification":84.49,"EmotionClassification":49.66,"ImdbClassification":66.64,"MassiveIntentClassification (en)":70.39,"MassiveScenarioClassification (en)":76.28,"MTOPDomainClassification (en)":93.47,"MTOPIntentClassification (en)":72.42,"ToxicConversationsClassification":67.71,"TweetSentimentExtractionClassification":56.85} -{"level_0":129,"index":76,"Rank":130,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.13,"AmazonCounterfactualClassification (en)":71.84,"AmazonPolarityClassification":80.04,"AmazonReviewsClassification (en)":39.75,"Banking77Classification":80.92,"EmotionClassification":44.88,"ImdbClassification":71.96,"MassiveIntentClassification (en)":69.11,"MassiveScenarioClassification (en)":74.16,"MTOPDomainClassification (en)":90.87,"MTOPIntentClassification (en)":68.53,"ToxicConversationsClassification":66.04,"TweetSentimentExtractionClassification":59.43} -{"level_0":130,"index":168,"Rank":131,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.04,"AmazonCounterfactualClassification (en)":72.36,"AmazonPolarityClassification":83.81,"AmazonReviewsClassification 
(en)":38.97,"Banking77Classification":80.97,"EmotionClassification":44.77,"ImdbClassification":72.21,"MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":73.26,"MTOPDomainClassification (en)":89.93,"MTOPIntentClassification (en)":66.56,"ToxicConversationsClassification":67.96,"TweetSentimentExtractionClassification":59.6} -{"level_0":131,"index":106,"Rank":132,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.04,"AmazonCounterfactualClassification (en)":67.76,"AmazonPolarityClassification":79.75,"AmazonReviewsClassification (en)":37.45,"Banking77Classification":81.17,"EmotionClassification":44.53,"ImdbClassification":76.46,"MassiveIntentClassification (en)":68.58,"MassiveScenarioClassification (en)":73.92,"MTOPDomainClassification (en)":90.67,"MTOPIntentClassification (en)":68.29,"ToxicConversationsClassification":70.23,"TweetSentimentExtractionClassification":57.64} -{"level_0":132,"index":101,"Rank":133,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.01,"AmazonCounterfactualClassification (en)":78.48,"AmazonPolarityClassification":78.74,"AmazonReviewsClassification (en)":39.93,"Banking77Classification":80.26,"EmotionClassification":47.26,"ImdbClassification":71.79,"MassiveIntentClassification (en)":66.55,"MassiveScenarioClassification (en)":73.11,"MTOPDomainClassification (en)":93.04,"MTOPIntentClassification (en)":63.52,"ToxicConversationsClassification":66.74,"TweetSentimentExtractionClassification":56.73} -{"level_0":133,"index":174,"Rank":134,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.98,"AmazonCounterfactualClassification (en)":73.76,"AmazonPolarityClassification":77.52,"AmazonReviewsClassification (en)":38.5,"Banking77Classification":83.94,"EmotionClassification":44.64,"ImdbClassification":68.61,"MassiveIntentClassification (en)":69.11,"MassiveScenarioClassification (en)":73.74,"MTOPDomainClassification (en)":89.38,"MTOPIntentClassification (en)":66.49,"ToxicConversationsClassification":69.65,"TweetSentimentExtractionClassification":60.43} -{"level_0":134,"index":258,"Rank":135,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.9,"AmazonCounterfactualClassification (en)":76.19,"AmazonPolarityClassification":69.63,"AmazonReviewsClassification (en)":35.53,"Banking77Classification":78.13,"EmotionClassification":45.48,"ImdbClassification":64.06,"MassiveIntentClassification (en)":71.69,"MassiveScenarioClassification (en)":77.2,"MTOPDomainClassification (en)":92.94,"MTOPIntentClassification (en)":74.43,"ToxicConversationsClassification":70.17,"TweetSentimentExtractionClassification":59.31} -{"level_0":135,"index":68,"Rank":136,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.9,"AmazonCounterfactualClassification (en)":71.87,"AmazonPolarityClassification":78.79,"AmazonReviewsClassification (en)":39.31,"Banking77Classification":80.58,"EmotionClassification":44.79,"ImdbClassification":71.53,"MassiveIntentClassification (en)":68.84,"MassiveScenarioClassification (en)":73.75,"MTOPDomainClassification (en)":90.79,"MTOPIntentClassification (en)":68.72,"ToxicConversationsClassification":66.29,"TweetSentimentExtractionClassification":59.49} -{"level_0":136,"index":113,"Rank":137,"Model":"bge-small-4096<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.8,"AmazonCounterfactualClassification (en)":68.75,"AmazonPolarityClassification":81.3,"AmazonReviewsClassification (en)":38.57,"Banking77Classification":79.98,"EmotionClassification":40.09,"ImdbClassification":80.09,"MassiveIntentClassification (en)":67.58,"MassiveScenarioClassification (en)":73.47,"MTOPDomainClassification (en)":90.36,"MTOPIntentClassification (en)":66.45,"ToxicConversationsClassification":69.34,"TweetSentimentExtractionClassification":57.6} -{"level_0":137,"index":71,"Rank":138,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.77,"AmazonCounterfactualClassification (en)":73.18,"AmazonPolarityClassification":79.99,"AmazonReviewsClassification (en)":39.66,"Banking77Classification":77.95,"EmotionClassification":44.38,"ImdbClassification":73.02,"MassiveIntentClassification (en)":67.75,"MassiveScenarioClassification (en)":72.36,"MTOPDomainClassification (en)":89.87,"MTOPIntentClassification (en)":71.03,"ToxicConversationsClassification":64.41,"TweetSentimentExtractionClassification":59.66} -{"level_0":138,"index":172,"Rank":139,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.76,"AmazonCounterfactualClassification (en)":68.93,"AmazonPolarityClassification":69.14,"AmazonReviewsClassification (en)":31.38,"Banking77Classification":85.34,"EmotionClassification":45.84,"ImdbClassification":66.42,"MassiveIntentClassification (en)":72.73,"MassiveScenarioClassification (en)":77.08,"MTOPDomainClassification (en)":92.83,"MTOPIntentClassification (en)":76.09,"ToxicConversationsClassification":69.13,"TweetSentimentExtractionClassification":58.16} -{"level_0":139,"index":67,"Rank":140,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":67.67,"AmazonCounterfactualClassification (en)":72.93,"AmazonPolarityClassification":74.28,"AmazonReviewsClassification (en)":36.14,"Banking77Classification":79.0,"EmotionClassification":42.85,"ImdbClassification":71.92,"MassiveIntentClassification (en)":69.99,"MassiveScenarioClassification (en)":75.15,"MTOPDomainClassification (en)":91.24,"MTOPIntentClassification (en)":74.08,"ToxicConversationsClassification":68.4,"TweetSentimentExtractionClassification":56.08} -{"level_0":140,"index":75,"Rank":141,"Model":"gte-micro-v3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.47,"AmazonCounterfactualClassification (en)":71.43,"AmazonPolarityClassification":77.72,"AmazonReviewsClassification (en)":38.96,"Banking77Classification":80.4,"EmotionClassification":44.54,"ImdbClassification":70.59,"MassiveIntentClassification (en)":68.5,"MassiveScenarioClassification (en)":73.55,"MTOPDomainClassification (en)":90.5,"MTOPIntentClassification (en)":67.52,"ToxicConversationsClassification":66.69,"TweetSentimentExtractionClassification":59.29} -{"level_0":141,"index":74,"Rank":142,"Model":"gte-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.47,"AmazonCounterfactualClassification (en)":71.43,"AmazonPolarityClassification":77.72,"AmazonReviewsClassification (en)":38.96,"Banking77Classification":80.4,"EmotionClassification":44.54,"ImdbClassification":70.59,"MassiveIntentClassification (en)":68.5,"MassiveScenarioClassification (en)":73.55,"MTOPDomainClassification (en)":90.5,"MTOPIntentClassification 
(en)":67.52,"ToxicConversationsClassification":66.69,"TweetSentimentExtractionClassification":59.29} -{"level_0":142,"index":238,"Rank":143,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":67.41,"AmazonCounterfactualClassification (en)":67.3,"AmazonPolarityClassification":75.05,"AmazonReviewsClassification (en)":37.3,"Banking77Classification":82.32,"EmotionClassification":43.19,"ImdbClassification":70.8,"MassiveIntentClassification (en)":70.61,"MassiveScenarioClassification (en)":77.77,"MTOPDomainClassification (en)":93.84,"MTOPIntentClassification (en)":67.71,"ToxicConversationsClassification":68.48,"TweetSentimentExtractionClassification":54.54} -{"level_0":143,"index":217,"Rank":144,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":67.32,"AmazonCounterfactualClassification (en)":75.75,"AmazonPolarityClassification":82.47,"AmazonReviewsClassification (en)":39.6,"Banking77Classification":75.76,"EmotionClassification":44.81,"ImdbClassification":73.53,"MassiveIntentClassification (en)":65.95,"MassiveScenarioClassification (en)":70.78,"MTOPDomainClassification (en)":84.29,"MTOPIntentClassification (en)":63.14,"ToxicConversationsClassification":72.04,"TweetSentimentExtractionClassification":59.73} -{"level_0":144,"index":44,"Rank":145,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.3,"AmazonCounterfactualClassification (en)":69.73,"AmazonPolarityClassification":86.9,"AmazonReviewsClassification (en)":44.05,"Banking77Classification":75.41,"EmotionClassification":41.99,"ImdbClassification":79.54,"MassiveIntentClassification (en)":63.34,"MassiveScenarioClassification (en)":72.37,"MTOPDomainClassification (en)":89.67,"MTOPIntentClassification (en)":60.28,"ToxicConversationsClassification":67.94,"TweetSentimentExtractionClassification":56.41} -{"level_0":145,"index":236,"Rank":146,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":67.14,"AmazonCounterfactualClassification (en)":70.03,"AmazonPolarityClassification":73.92,"AmazonReviewsClassification (en)":37.21,"Banking77Classification":81.21,"EmotionClassification":46.33,"ImdbClassification":70.86,"MassiveIntentClassification (en)":70.06,"MassiveScenarioClassification (en)":75.49,"MTOPDomainClassification (en)":94.01,"MTOPIntentClassification (en)":63.86,"ToxicConversationsClassification":68.65,"TweetSentimentExtractionClassification":54.09} -{"level_0":146,"index":82,"Rank":147,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.13,"AmazonCounterfactualClassification (en)":67.57,"AmazonPolarityClassification":71.44,"AmazonReviewsClassification (en)":35.75,"Banking77Classification":83.22,"EmotionClassification":49.21,"ImdbClassification":63.53,"MassiveIntentClassification (en)":69.01,"MassiveScenarioClassification (en)":75.9,"MTOPDomainClassification (en)":92.56,"MTOPIntentClassification (en)":71.85,"ToxicConversationsClassification":68.84,"TweetSentimentExtractionClassification":56.69} -{"level_0":147,"index":237,"Rank":148,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":67.11,"AmazonCounterfactualClassification (en)":68.6,"AmazonPolarityClassification":74.58,"AmazonReviewsClassification 
(en)":38.2,"Banking77Classification":82.22,"EmotionClassification":45.54,"ImdbClassification":68.15,"MassiveIntentClassification (en)":70.23,"MassiveScenarioClassification (en)":75.94,"MTOPDomainClassification (en)":93.6,"MTOPIntentClassification (en)":65.93,"ToxicConversationsClassification":67.56,"TweetSentimentExtractionClassification":54.77} -{"level_0":148,"index":99,"Rank":149,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.07,"AmazonCounterfactualClassification (en)":74.81,"AmazonPolarityClassification":78.4,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":80.06,"EmotionClassification":46.46,"ImdbClassification":72.88,"MassiveIntentClassification (en)":65.79,"MassiveScenarioClassification (en)":71.1,"MTOPDomainClassification (en)":92.62,"MTOPIntentClassification (en)":64.51,"ToxicConversationsClassification":64.71,"TweetSentimentExtractionClassification":56.74} -{"level_0":149,"index":121,"Rank":150,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.07,"AmazonCounterfactualClassification (en)":74.81,"AmazonPolarityClassification":78.4,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":80.06,"EmotionClassification":46.46,"ImdbClassification":72.88,"MassiveIntentClassification (en)":65.79,"MassiveScenarioClassification (en)":71.1,"MTOPDomainClassification (en)":92.62,"MTOPIntentClassification (en)":64.51,"ToxicConversationsClassification":64.71,"TweetSentimentExtractionClassification":56.74} -{"level_0":150,"index":132,"Rank":151,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.95,"AmazonCounterfactualClassification (en)":67.06,"AmazonPolarityClassification":70.41,"AmazonReviewsClassification (en)":33.21,"Banking77Classification":82.73,"EmotionClassification":43.47,"ImdbClassification":67.3,"MassiveIntentClassification (en)":71.77,"MassiveScenarioClassification (en)":77.85,"MTOPDomainClassification (en)":91.82,"MTOPIntentClassification (en)":74.7,"ToxicConversationsClassification":67.48,"TweetSentimentExtractionClassification":55.62} -{"level_0":151,"index":214,"Rank":152,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":66.68,"AmazonCounterfactualClassification (en)":72.19,"AmazonPolarityClassification":68.63,"AmazonReviewsClassification (en)":37.42,"Banking77Classification":80.02,"EmotionClassification":44.77,"ImdbClassification":67.04,"MassiveIntentClassification (en)":67.78,"MassiveScenarioClassification (en)":76.0,"MTOPDomainClassification (en)":93.18,"MTOPIntentClassification (en)":69.31,"ToxicConversationsClassification":67.77,"TweetSentimentExtractionClassification":56.1} -{"level_0":152,"index":79,"Rank":153,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.52,"AmazonCounterfactualClassification (en)":65.21,"AmazonPolarityClassification":73.21,"AmazonReviewsClassification (en)":34.96,"Banking77Classification":82.06,"EmotionClassification":46.39,"ImdbClassification":64.05,"MassiveIntentClassification (en)":68.65,"MassiveScenarioClassification (en)":76.04,"MTOPDomainClassification (en)":92.08,"MTOPIntentClassification (en)":71.19,"ToxicConversationsClassification":68.73,"TweetSentimentExtractionClassification":55.67} 
-{"level_0":153,"index":212,"Rank":154,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":66.48,"AmazonCounterfactualClassification (en)":66.85,"AmazonPolarityClassification":85.92,"AmazonReviewsClassification (en)":41.02,"Banking77Classification":80.63,"EmotionClassification":40.55,"ImdbClassification":76.6,"MassiveIntentClassification (en)":64.95,"MassiveScenarioClassification (en)":70.38,"MTOPDomainClassification (en)":86.31,"MTOPIntentClassification (en)":62.77,"ToxicConversationsClassification":66.53,"TweetSentimentExtractionClassification":55.23} -{"level_0":154,"index":105,"Rank":155,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.35,"AmazonCounterfactualClassification (en)":66.27,"AmazonPolarityClassification":75.37,"AmazonReviewsClassification (en)":35.81,"Banking77Classification":80.58,"EmotionClassification":42.47,"ImdbClassification":70.7,"MassiveIntentClassification (en)":67.78,"MassiveScenarioClassification (en)":73.04,"MTOPDomainClassification (en)":90.25,"MTOPIntentClassification (en)":67.95,"ToxicConversationsClassification":69.21,"TweetSentimentExtractionClassification":56.71} -{"level_0":155,"index":123,"Rank":156,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.19,"AmazonCounterfactualClassification (en)":68.06,"AmazonPolarityClassification":68.97,"AmazonReviewsClassification (en)":33.86,"Banking77Classification":84.33,"EmotionClassification":44.87,"ImdbClassification":61.77,"MassiveIntentClassification (en)":69.67,"MassiveScenarioClassification (en)":75.34,"MTOPDomainClassification (en)":93.68,"MTOPIntentClassification (en)":71.34,"ToxicConversationsClassification":66.55,"TweetSentimentExtractionClassification":55.85} -{"level_0":156,"index":171,"Rank":157,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.07,"AmazonCounterfactualClassification (en)":66.73,"AmazonPolarityClassification":67.61,"AmazonReviewsClassification (en)":31.18,"Banking77Classification":84.06,"EmotionClassification":44.68,"ImdbClassification":63.87,"MassiveIntentClassification (en)":71.09,"MassiveScenarioClassification (en)":76.25,"MTOPDomainClassification (en)":91.53,"MTOPIntentClassification (en)":72.83,"ToxicConversationsClassification":66.15,"TweetSentimentExtractionClassification":56.85} -{"level_0":157,"index":251,"Rank":158,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.01,"AmazonCounterfactualClassification (en)":77.94,"AmazonPolarityClassification":76.0,"AmazonReviewsClassification (en)":37.18,"Banking77Classification":75.5,"EmotionClassification":45.21,"ImdbClassification":68.85,"MassiveIntentClassification (en)":66.32,"MassiveScenarioClassification (en)":70.62,"MTOPDomainClassification (en)":85.38,"MTOPIntentClassification (en)":64.51,"ToxicConversationsClassification":67.08,"TweetSentimentExtractionClassification":57.57} -{"level_0":158,"index":285,"Rank":159,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":65.94,"AmazonCounterfactualClassification (en)":74.16,"AmazonPolarityClassification":61.91,"AmazonReviewsClassification (en)":32.06,"Banking77Classification":82.05,"EmotionClassification":46.65,"ImdbClassification":65.02,"MassiveIntentClassification (en)":68.48,"MassiveScenarioClassification 
(en)":74.98,"MTOPDomainClassification (en)":93.17,"MTOPIntentClassification (en)":71.1,"ToxicConversationsClassification":68.15,"TweetSentimentExtractionClassification":53.57} -{"level_0":159,"index":260,"Rank":160,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.78,"AmazonCounterfactualClassification (en)":70.81,"AmazonPolarityClassification":67.05,"AmazonReviewsClassification (en)":35.85,"Banking77Classification":74.67,"EmotionClassification":42.31,"ImdbClassification":63.69,"MassiveIntentClassification (en)":69.05,"MassiveScenarioClassification (en)":75.8,"MTOPDomainClassification (en)":92.48,"MTOPIntentClassification (en)":70.27,"ToxicConversationsClassification":68.22,"TweetSentimentExtractionClassification":59.19} -{"level_0":160,"index":72,"Rank":161,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.72,"AmazonCounterfactualClassification (en)":70.4,"AmazonPolarityClassification":82.04,"AmazonReviewsClassification (en)":42.41,"Banking77Classification":71.07,"EmotionClassification":46.78,"ImdbClassification":74.6,"MassiveIntentClassification (en)":62.27,"MassiveScenarioClassification (en)":68.22,"MTOPDomainClassification (en)":88.24,"MTOPIntentClassification (en)":54.94,"ToxicConversationsClassification":65.15,"TweetSentimentExtractionClassification":62.54} -{"level_0":161,"index":103,"Rank":162,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.65,"AmazonCounterfactualClassification (en)":71.18,"AmazonPolarityClassification":78.75,"AmazonReviewsClassification (en)":38.26,"Banking77Classification":79.12,"EmotionClassification":45.77,"ImdbClassification":69.49,"MassiveIntentClassification (en)":64.76,"MassiveScenarioClassification (en)":70.05,"MTOPDomainClassification (en)":90.94,"MTOPIntentClassification (en)":58.63,"ToxicConversationsClassification":61.96,"TweetSentimentExtractionClassification":58.9} -{"level_0":162,"index":235,"Rank":163,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":65.25,"AmazonCounterfactualClassification (en)":69.33,"AmazonPolarityClassification":67.82,"AmazonReviewsClassification (en)":38.48,"Banking77Classification":79.26,"EmotionClassification":42.2,"ImdbClassification":65.99,"MassiveIntentClassification (en)":67.05,"MassiveScenarioClassification (en)":75.4,"MTOPDomainClassification (en)":92.42,"MTOPIntentClassification (en)":62.44,"ToxicConversationsClassification":66.6,"TweetSentimentExtractionClassification":56.02} -{"level_0":163,"index":230,"Rank":164,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":65.03,"AmazonCounterfactualClassification (en)":65.03,"AmazonPolarityClassification":67.14,"AmazonReviewsClassification (en)":31.44,"Banking77Classification":81.7,"EmotionClassification":42.22,"ImdbClassification":71.17,"MassiveIntentClassification (en)":69.76,"MassiveScenarioClassification (en)":75.67,"MTOPDomainClassification (en)":91.89,"MTOPIntentClassification (en)":68.27,"ToxicConversationsClassification":61.05,"TweetSentimentExtractionClassification":55.05} -{"level_0":164,"index":70,"Rank":165,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.86,"AmazonCounterfactualClassification (en)":69.6,"AmazonPolarityClassification":82.09,"AmazonReviewsClassification 
(en)":41.94,"Banking77Classification":67.13,"EmotionClassification":45.79,"ImdbClassification":74.95,"MassiveIntentClassification (en)":61.52,"MassiveScenarioClassification (en)":67.04,"MTOPDomainClassification (en)":87.27,"MTOPIntentClassification (en)":54.66,"ToxicConversationsClassification":64.47,"TweetSentimentExtractionClassification":61.8} -{"level_0":165,"index":59,"Rank":166,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.85,"AmazonCounterfactualClassification (en)":79.06,"AmazonPolarityClassification":70.19,"AmazonReviewsClassification (en)":34.29,"Banking77Classification":75.89,"EmotionClassification":40.26,"ImdbClassification":61.14,"MassiveIntentClassification (en)":65.6,"MassiveScenarioClassification (en)":70.37,"MTOPDomainClassification (en)":87.22,"MTOPIntentClassification (en)":69.45,"ToxicConversationsClassification":70.26,"TweetSentimentExtractionClassification":54.49} -{"level_0":166,"index":239,"Rank":167,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.71,"AmazonCounterfactualClassification (en)":64.06,"AmazonPolarityClassification":66.88,"AmazonReviewsClassification (en)":34.85,"Banking77Classification":82.35,"EmotionClassification":41.91,"ImdbClassification":60.17,"MassiveIntentClassification (en)":70.4,"MassiveScenarioClassification (en)":73.73,"MTOPDomainClassification (en)":91.34,"MTOPIntentClassification (en)":71.07,"ToxicConversationsClassification":64.01,"TweetSentimentExtractionClassification":55.74} -{"level_0":167,"index":234,"Rank":168,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":64.67,"AmazonCounterfactualClassification (en)":71.81,"AmazonPolarityClassification":68.0,"AmazonReviewsClassification (en)":35.45,"Banking77Classification":71.48,"EmotionClassification":40.04,"ImdbClassification":61.52,"MassiveIntentClassification (en)":66.71,"MassiveScenarioClassification (en)":74.0,"MTOPDomainClassification (en)":91.59,"MTOPIntentClassification (en)":66.4,"ToxicConversationsClassification":69.09,"TweetSentimentExtractionClassification":59.97} -{"level_0":168,"index":78,"Rank":169,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.56,"AmazonCounterfactualClassification (en)":69.57,"AmazonPolarityClassification":74.74,"AmazonReviewsClassification (en)":35.36,"Banking77Classification":77.51,"EmotionClassification":39.09,"ImdbClassification":67.87,"MassiveIntentClassification (en)":65.15,"MassiveScenarioClassification (en)":70.75,"MTOPDomainClassification (en)":89.98,"MTOPIntentClassification (en)":66.43,"ToxicConversationsClassification":64.01,"TweetSentimentExtractionClassification":54.26} -{"level_0":169,"index":257,"Rank":170,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.45,"AmazonCounterfactualClassification (en)":70.67,"AmazonPolarityClassification":67.73,"AmazonReviewsClassification (en)":32.62,"Banking77Classification":74.12,"EmotionClassification":38.64,"ImdbClassification":68.43,"MassiveIntentClassification (en)":67.23,"MassiveScenarioClassification (en)":72.79,"MTOPDomainClassification (en)":90.12,"MTOPIntentClassification (en)":65.0,"ToxicConversationsClassification":68.99,"TweetSentimentExtractionClassification":57.01} 
-{"level_0":170,"index":192,"Rank":171,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.67,"AmazonCounterfactualClassification (en)":61.99,"AmazonPolarityClassification":64.75,"AmazonReviewsClassification (en)":29.79,"Banking77Classification":82.92,"EmotionClassification":40.56,"ImdbClassification":61.71,"MassiveIntentClassification (en)":68.72,"MassiveScenarioClassification (en)":74.39,"MTOPDomainClassification (en)":91.45,"MTOPIntentClassification (en)":70.03,"ToxicConversationsClassification":65.33,"TweetSentimentExtractionClassification":52.41} -{"level_0":171,"index":77,"Rank":172,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.54,"AmazonCounterfactualClassification (en)":66.84,"AmazonPolarityClassification":70.48,"AmazonReviewsClassification (en)":33.65,"Banking77Classification":78.04,"EmotionClassification":39.18,"ImdbClassification":65.14,"MassiveIntentClassification (en)":65.32,"MassiveScenarioClassification (en)":70.51,"MTOPDomainClassification (en)":89.27,"MTOPIntentClassification (en)":65.36,"ToxicConversationsClassification":63.92,"TweetSentimentExtractionClassification":54.8} -{"level_0":172,"index":259,"Rank":173,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.42,"AmazonCounterfactualClassification (en)":69.84,"AmazonPolarityClassification":65.24,"AmazonReviewsClassification (en)":33.95,"Banking77Classification":72.79,"EmotionClassification":35.6,"ImdbClassification":66.32,"MassiveIntentClassification (en)":66.12,"MassiveScenarioClassification (en)":73.74,"MTOPDomainClassification (en)":90.4,"MTOPIntentClassification (en)":62.28,"ToxicConversationsClassification":67.56,"TweetSentimentExtractionClassification":57.24} -{"level_0":173,"index":141,"Rank":174,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.42,"AmazonCounterfactualClassification (en)":81.49,"AmazonPolarityClassification":62.73,"AmazonReviewsClassification (en)":31.55,"Banking77Classification":73.5,"EmotionClassification":38.29,"ImdbClassification":55.75,"MassiveIntentClassification (en)":64.37,"MassiveScenarioClassification (en)":69.05,"MTOPDomainClassification (en)":89.92,"MTOPIntentClassification (en)":70.85,"ToxicConversationsClassification":67.28,"TweetSentimentExtractionClassification":56.23} -{"level_0":174,"index":229,"Rank":175,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":63.05,"AmazonCounterfactualClassification (en)":64.15,"AmazonPolarityClassification":62.58,"AmazonReviewsClassification (en)":31.79,"Banking77Classification":79.75,"EmotionClassification":38.43,"ImdbClassification":60.66,"MassiveIntentClassification (en)":67.4,"MassiveScenarioClassification (en)":75.76,"MTOPDomainClassification (en)":91.56,"MTOPIntentClassification (en)":62.18,"ToxicConversationsClassification":66.99,"TweetSentimentExtractionClassification":55.41} -{"level_0":175,"index":104,"Rank":176,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.94,"AmazonCounterfactualClassification (en)":65.09,"AmazonPolarityClassification":70.04,"AmazonReviewsClassification (en)":35.34,"Banking77Classification":76.37,"EmotionClassification":41.84,"ImdbClassification":62.8,"MassiveIntentClassification (en)":63.51,"MassiveScenarioClassification 
(en)":71.01,"MTOPDomainClassification (en)":90.8,"MTOPIntentClassification (en)":58.01,"ToxicConversationsClassification":64.31,"TweetSentimentExtractionClassification":56.19} -{"level_0":176,"index":228,"Rank":177,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":62.87,"AmazonCounterfactualClassification (en)":65.28,"AmazonPolarityClassification":62.99,"AmazonReviewsClassification (en)":30.79,"Banking77Classification":80.41,"EmotionClassification":41.17,"ImdbClassification":59.78,"MassiveIntentClassification (en)":67.11,"MassiveScenarioClassification (en)":74.57,"MTOPDomainClassification (en)":91.88,"MTOPIntentClassification (en)":62.83,"ToxicConversationsClassification":63.34,"TweetSentimentExtractionClassification":54.24} -{"level_0":177,"index":73,"Rank":178,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.81,"AmazonCounterfactualClassification (en)":68.82,"AmazonPolarityClassification":77.12,"AmazonReviewsClassification (en)":40.94,"Banking77Classification":69.56,"EmotionClassification":46.22,"ImdbClassification":62.17,"MassiveIntentClassification (en)":59.03,"MassiveScenarioClassification (en)":66.59,"MTOPDomainClassification (en)":86.69,"MTOPIntentClassification (en)":49.7,"ToxicConversationsClassification":66.1,"TweetSentimentExtractionClassification":60.78} -{"level_0":178,"index":227,"Rank":179,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":62.71,"AmazonCounterfactualClassification (en)":75.93,"AmazonPolarityClassification":68.95,"AmazonReviewsClassification (en)":35.8,"Banking77Classification":69.85,"EmotionClassification":37.22,"ImdbClassification":62.04,"MassiveIntentClassification (en)":61.46,"MassiveScenarioClassification (en)":66.41,"MTOPDomainClassification (en)":86.06,"MTOPIntentClassification (en)":63.03,"ToxicConversationsClassification":66.9,"TweetSentimentExtractionClassification":58.82} -{"level_0":179,"index":218,"Rank":180,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":62.5,"AmazonCounterfactualClassification (en)":67.09,"AmazonPolarityClassification":74.48,"AmazonReviewsClassification (en)":33.85,"Banking77Classification":73.55,"EmotionClassification":42.22,"ImdbClassification":69.63,"MassiveIntentClassification (en)":59.84,"MassiveScenarioClassification (en)":66.25,"MTOPDomainClassification (en)":81.71,"MTOPIntentClassification (en)":59.23,"ToxicConversationsClassification":68.82,"TweetSentimentExtractionClassification":53.36} -{"level_0":180,"index":248,"Rank":181,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.23,"AmazonCounterfactualClassification (en)":70.97,"AmazonPolarityClassification":66.1,"AmazonReviewsClassification (en)":33.13,"Banking77Classification":78.08,"EmotionClassification":43.35,"ImdbClassification":59.35,"MassiveIntentClassification (en)":63.83,"MassiveScenarioClassification (en)":66.96,"MTOPDomainClassification (en)":81.05,"MTOPIntentClassification (en)":62.79,"ToxicConversationsClassification":65.97,"TweetSentimentExtractionClassification":55.22} -{"level_0":181,"index":134,"Rank":182,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":61.67,"AmazonCounterfactualClassification (en)":61.79,"AmazonPolarityClassification":62.36,"AmazonReviewsClassification 
(en)":29.59,"Banking77Classification":78.6,"EmotionClassification":39.6,"ImdbClassification":61.22,"MassiveIntentClassification (en)":66.78,"MassiveScenarioClassification (en)":73.78,"MTOPDomainClassification (en)":89.97,"MTOPIntentClassification (en)":59.57,"ToxicConversationsClassification":65.12,"TweetSentimentExtractionClassification":51.63} -{"level_0":182,"index":122,"Rank":183,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.66,"AmazonCounterfactualClassification (en)":74.25,"AmazonPolarityClassification":71.33,"AmazonReviewsClassification (en)":33.56,"Banking77Classification":63.41,"EmotionClassification":35.28,"ImdbClassification":65.35,"MassiveIntentClassification (en)":59.88,"MassiveScenarioClassification (en)":64.28,"MTOPDomainClassification (en)":82.63,"MTOPIntentClassification (en)":68.14,"ToxicConversationsClassification":70.0,"TweetSentimentExtractionClassification":51.81} -{"level_0":183,"index":81,"Rank":184,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.46,"AmazonCounterfactualClassification (en)":65.88,"AmazonPolarityClassification":74.94,"AmazonReviewsClassification (en)":35.1,"Banking77Classification":74.68,"EmotionClassification":42.23,"ImdbClassification":62.9,"MassiveIntentClassification (en)":58.08,"MassiveScenarioClassification (en)":66.34,"MTOPDomainClassification (en)":81.52,"MTOPIntentClassification (en)":58.24,"ToxicConversationsClassification":62.79,"TweetSentimentExtractionClassification":54.82} -{"level_0":184,"index":128,"Rank":185,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.44,"AmazonCounterfactualClassification (en)":64.87,"AmazonPolarityClassification":65.17,"AmazonReviewsClassification (en)":31.02,"Banking77Classification":74.12,"EmotionClassification":36.71,"ImdbClassification":66.9,"MassiveIntentClassification (en)":61.95,"MassiveScenarioClassification (en)":70.2,"MTOPDomainClassification (en)":89.13,"MTOPIntentClassification (en)":62.62,"ToxicConversationsClassification":63.57,"TweetSentimentExtractionClassification":51.04} -{"level_0":185,"index":256,"Rank":186,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.37,"AmazonCounterfactualClassification (en)":76.09,"AmazonPolarityClassification":64.51,"AmazonReviewsClassification (en)":32.07,"Banking77Classification":70.88,"EmotionClassification":32.77,"ImdbClassification":62.15,"MassiveIntentClassification (en)":61.44,"MassiveScenarioClassification (en)":66.07,"MTOPDomainClassification (en)":85.0,"MTOPIntentClassification (en)":68.76,"ToxicConversationsClassification":67.81,"TweetSentimentExtractionClassification":48.92} -{"level_0":186,"index":80,"Rank":187,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.72,"AmazonCounterfactualClassification (en)":61.24,"AmazonPolarityClassification":65.4,"AmazonReviewsClassification (en)":31.17,"Banking77Classification":77.7,"EmotionClassification":39.08,"ImdbClassification":58.67,"MassiveIntentClassification (en)":61.41,"MassiveScenarioClassification (en)":69.74,"MTOPDomainClassification (en)":86.96,"MTOPIntentClassification (en)":62.25,"ToxicConversationsClassification":62.66,"TweetSentimentExtractionClassification":52.41} -{"level_0":187,"index":173,"Rank":188,"Model":"jina-embedding-s-en-v1<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.56,"AmazonCounterfactualClassification (en)":64.82,"AmazonPolarityClassification":64.28,"AmazonReviewsClassification (en)":30.62,"Banking77Classification":74.64,"EmotionClassification":36.08,"ImdbClassification":58.71,"MassiveIntentClassification (en)":64.67,"MassiveScenarioClassification (en)":71.79,"MTOPDomainClassification (en)":88.82,"MTOPIntentClassification (en)":58.61,"ToxicConversationsClassification":59.44,"TweetSentimentExtractionClassification":54.26} -{"level_0":188,"index":233,"Rank":189,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":57.65,"AmazonCounterfactualClassification (en)":60.54,"AmazonPolarityClassification":59.59,"AmazonReviewsClassification (en)":31.01,"Banking77Classification":67.05,"EmotionClassification":33.18,"ImdbClassification":63.98,"MassiveIntentClassification (en)":57.21,"MassiveScenarioClassification (en)":66.11,"MTOPDomainClassification (en)":78.57,"MTOPIntentClassification (en)":57.07,"ToxicConversationsClassification":67.76,"TweetSentimentExtractionClassification":49.68} -{"level_0":189,"index":232,"Rank":190,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":57.29,"AmazonCounterfactualClassification (en)":56.91,"AmazonPolarityClassification":60.32,"AmazonReviewsClassification (en)":29.67,"Banking77Classification":67.69,"EmotionClassification":36.93,"ImdbClassification":62.57,"MassiveIntentClassification (en)":56.19,"MassiveScenarioClassification (en)":66.03,"MTOPDomainClassification (en)":79.11,"MTOPIntentClassification (en)":55.85,"ToxicConversationsClassification":65.4,"TweetSentimentExtractionClassification":50.8} -{"level_0":190,"index":11,"Rank":191,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":53.18,"AmazonCounterfactualClassification (en)":76.84,"AmazonPolarityClassification":61.01,"AmazonReviewsClassification (en)":28.71,"Banking77Classification":57.76,"EmotionClassification":24.83,"ImdbClassification":57.58,"MassiveIntentClassification (en)":47.91,"MassiveScenarioClassification (en)":55.92,"MTOPDomainClassification (en)":75.36,"MTOPIntentClassification (en)":49.47,"ToxicConversationsClassification":54.05,"TweetSentimentExtractionClassification":48.73} -{"level_0":191,"index":231,"Rank":192,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":52.37,"AmazonCounterfactualClassification (en)":58.7,"AmazonPolarityClassification":57.77,"AmazonReviewsClassification (en)":26.26,"Banking77Classification":66.66,"EmotionClassification":24.82,"ImdbClassification":56.35,"MassiveIntentClassification (en)":51.73,"MassiveScenarioClassification (en)":58.58,"MTOPDomainClassification (en)":74.53,"MTOPIntentClassification (en)":50.05,"ToxicConversationsClassification":57.44,"TweetSentimentExtractionClassification":45.52} -{"level_0":192,"index":255,"Rank":193,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.73,"AmazonCounterfactualClassification (en)":50.81,"AmazonPolarityClassification":52.57,"AmazonReviewsClassification (en)":22.63,"Banking77Classification":36.43,"EmotionClassification":22.75,"ImdbClassification":50.76,"MassiveIntentClassification (en)":34.33,"MassiveScenarioClassification (en)":44.13,"MTOPDomainClassification (en)":61.03,"MTOPIntentClassification 
(en)":29.68,"ToxicConversationsClassification":54.93,"TweetSentimentExtractionClassification":40.75} -{"level_0":193,"index":2,"Rank":194,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":61.85,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":83.21,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":194,"index":30,"Rank":204,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":69.91,"AmazonPolarityClassification":72.95,"AmazonReviewsClassification (en)":"","Banking77Classification":64.12,"EmotionClassification":31.51,"ImdbClassification":65.17,"MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":68.76,"MTOPDomainClassification (en)":81.21,"MTOPIntentClassification (en)":51.56,"ToxicConversationsClassification":65.65,"TweetSentimentExtractionClassification":57.41} -{"level_0":195,"index":31,"Rank":205,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":53.41,"MassiveScenarioClassification (en)":61.78,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":196,"index":32,"Rank":206,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":54.76,"MassiveScenarioClassification (en)":63.42,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":197,"index":37,"Rank":207,"Model":"openai_clip_embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":57.49,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.59,"Banking77Classification":73.42,"EmotionClassification":33.62,"ImdbClassification":56.17,"MassiveIntentClassification (en)":62.96,"MassiveScenarioClassification (en)":71.47,"MTOPDomainClassification (en)":87.07,"MTOPIntentClassification (en)":61.96,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":49.34} -{"level_0":198,"index":52,"Rank":218,"Model":"gemma-2b-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":67.49,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":34.9,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification 
(en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":199,"index":97,"Rank":232,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28} -{"level_0":200,"index":98,"Rank":233,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""} -{"level_0":201,"index":196,"Rank":253,"Model":"fin-mpnet-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":202,"index":200,"Rank":254,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":75.54,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":203,"index":241,"Rank":265,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (en)":71.57,"AmazonPolarityClassification":69.21,"AmazonReviewsClassification (en)":35.11,"Banking77Classification":79.77,"EmotionClassification":42.37,"ImdbClassification":60.46,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":87.06,"MTOPIntentClassification (en)":65.52,"ToxicConversationsClassification":66.07,"TweetSentimentExtractionClassification":56.12} -{"level_0":204,"index":242,"Rank":266,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (en)":75.81,"AmazonPolarityClassification":76.41,"AmazonReviewsClassification (en)":38.51,"Banking77Classification":81.07,"EmotionClassification":45.83,"ImdbClassification":64.57,"MassiveIntentClassification 
(en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":89.24,"MTOPIntentClassification (en)":68.69,"ToxicConversationsClassification":71.02,"TweetSentimentExtractionClassification":59.03} -{"level_0":205,"index":247,"Rank":267,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":50.68,"MassiveScenarioClassification (en)":60.82,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""} -{"level_0":206,"index":262,"Rank":270,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":80.3,"AmazonReviewsClassification (en)":"","Banking77Classification":85.19,"EmotionClassification":48.22,"ImdbClassification":69.87,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":75.01,"TweetSentimentExtractionClassification":61.8} -{"level_0":207,"index":263,"Rank":271,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":0.86,"AmazonReviewsClassification (en)":"","Banking77Classification":0.81,"EmotionClassification":0.48,"ImdbClassification":0.74,"MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":0.69,"TweetSentimentExtractionClassification":0.61} -{"level_0":208,"index":267,"Rank":275,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":76.66,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.43,"Banking77Classification":73.74,"EmotionClassification":47.44,"ImdbClassification":79.55,"MassiveIntentClassification (en)":65.61,"MassiveScenarioClassification (en)":71.05,"MTOPDomainClassification (en)":90.48,"MTOPIntentClassification (en)":58.11,"ToxicConversationsClassification":63.53,"TweetSentimentExtractionClassification":61.77} +{"Rank":1,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":83.05,"AmazonPolarityClassification":97.34,"Banking77Classification":88.62,"EmotionClassification":52.51,"ImdbClassification":95.65,"ToxicConversationsClassification":89.67,"TweetSentimentExtractionClassification":74.52} +{"Rank":2,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.44,"AmazonPolarityClassification":96.58,"Banking77Classification":86.96,"EmotionClassification":59.81,"ImdbClassification":96.13,"ToxicConversationsClassification":83.58,"TweetSentimentExtractionClassification":71.55} +{"Rank":3,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, 
fp32)":4.47,"Average":81.98,"AmazonPolarityClassification":97.34,"Banking77Classification":86.01,"EmotionClassification":51.53,"ImdbClassification":95.7,"ToxicConversationsClassification":88.33,"TweetSentimentExtractionClassification":72.97} +{"Rank":4,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":79.28,"AmazonPolarityClassification":96.32,"Banking77Classification":88.59,"EmotionClassification":50.28,"ImdbClassification":95.75,"ToxicConversationsClassification":81.75,"TweetSentimentExtractionClassification":62.98} +{"Rank":5,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":78.87,"AmazonPolarityClassification":96.7,"Banking77Classification":81.68,"EmotionClassification":54.53,"ImdbClassification":95.58,"ToxicConversationsClassification":78.75,"TweetSentimentExtractionClassification":66.0} +{"Rank":6,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.12,"AmazonPolarityClassification":96.41,"Banking77Classification":81.64,"EmotionClassification":48.29,"ImdbClassification":95.49,"ToxicConversationsClassification":75.45,"TweetSentimentExtractionClassification":59.44} +{"Rank":7,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.49,"AmazonPolarityClassification":92.85,"Banking77Classification":85.69,"EmotionClassification":51.58,"ImdbClassification":87.67,"ToxicConversationsClassification":72.92,"TweetSentimentExtractionClassification":62.22} +{"Rank":8,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":75.05,"AmazonPolarityClassification":91.12,"Banking77Classification":88.31,"EmotionClassification":52.04,"ImdbClassification":87.42,"ToxicConversationsClassification":69.26,"TweetSentimentExtractionClassification":62.14} +{"Rank":9,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":74.58,"AmazonPolarityClassification":89.69,"Banking77Classification":88.17,"EmotionClassification":51.71,"ImdbClassification":85.78,"ToxicConversationsClassification":71.01,"TweetSentimentExtractionClassification":61.11} +{"Rank":10,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":74.32,"AmazonPolarityClassification":92.79,"Banking77Classification":82.31,"EmotionClassification":48.57,"ImdbClassification":90.23,"ToxicConversationsClassification":70.04,"TweetSentimentExtractionClassification":62.01} +{"Rank":11,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":74.28,"AmazonPolarityClassification":93.17,"Banking77Classification":80.88,"EmotionClassification":51.95,"ImdbClassification":87.54,"ToxicConversationsClassification":70.95,"TweetSentimentExtractionClassification":61.21} +{"Rank":12,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":74.02,"AmazonPolarityClassification":92.87,"Banking77Classification":78.46,"EmotionClassification":51.74,"ImdbClassification":87.01,"ToxicConversationsClassification":71.73,"TweetSentimentExtractionClassification":62.33} +{"Rank":13,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":73.63,"AmazonPolarityClassification":90.84,"Banking77Classification":83.01,"EmotionClassification":50.63,"ImdbClassification":83.66,"ToxicConversationsClassification":71.91,"TweetSentimentExtractionClassification":61.72} +{"Rank":14,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":73.48,"AmazonPolarityClassification":91.89,"Banking77Classification":84.15,"EmotionClassification":47.73,"ImdbClassification":85.47,"ToxicConversationsClassification":71.25,"TweetSentimentExtractionClassification":60.4} +{"Rank":15,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":73.46,"AmazonPolarityClassification":86.07,"Banking77Classification":88.05,"EmotionClassification":51.2,"ImdbClassification":82.94,"ToxicConversationsClassification":70.59,"TweetSentimentExtractionClassification":61.9} +{"Rank":16,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":72.63,"AmazonPolarityClassification":93.26,"Banking77Classification":75.88,"EmotionClassification":47.58,"ImdbClassification":90.23,"ToxicConversationsClassification":66.01,"TweetSentimentExtractionClassification":62.8} +{"Rank":17,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":72.5,"AmazonPolarityClassification":91.35,"Banking77Classification":83.69,"EmotionClassification":45.88,"ImdbClassification":83.99,"ToxicConversationsClassification":70.87,"TweetSentimentExtractionClassification":59.2} +{"Rank":18,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.32,"AmazonPolarityClassification":92.83,"Banking77Classification":68.04,"EmotionClassification":50.33,"ImdbClassification":89.38,"ToxicConversationsClassification":70.0,"TweetSentimentExtractionClassification":63.35} +{"Rank":19,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.29,"AmazonPolarityClassification":91.32,"Banking77Classification":83.19,"EmotionClassification":45.8,"ImdbClassification":85.93,"ToxicConversationsClassification":68.52,"TweetSentimentExtractionClassification":58.98} +{"Rank":20,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":71.65,"AmazonPolarityClassification":85.29,"Banking77Classification":86.16,"EmotionClassification":48.88,"ImdbClassification":77.95,"ToxicConversationsClassification":70.71,"TweetSentimentExtractionClassification":60.9} +{"Rank":21,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.37,"AmazonPolarityClassification":86.72,"Banking77Classification":80.66,"EmotionClassification":48.74,"ImdbClassification":77.98,"ToxicConversationsClassification":72.29,"TweetSentimentExtractionClassification":61.81} +{"Rank":22,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":70.41,"AmazonPolarityClassification":82.05,"Banking77Classification":86.01,"EmotionClassification":48.38,"ImdbClassification":75.33,"ToxicConversationsClassification":69.92,"TweetSentimentExtractionClassification":60.76} +{"Rank":23,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":70.4,"AmazonPolarityClassification":91.76,"Banking77Classification":73.53,"EmotionClassification":45.68,"ImdbClassification":84.29,"ToxicConversationsClassification":64.33,"TweetSentimentExtractionClassification":62.8} +{"Rank":24,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.24,"AmazonPolarityClassification":88.74,"Banking77Classification":82.78,"EmotionClassification":42.92,"ImdbClassification":80.87,"ToxicConversationsClassification":68.16,"TweetSentimentExtractionClassification":57.99} +{"Rank":25,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":70.2,"AmazonPolarityClassification":85.12,"Banking77Classification":76.48,"EmotionClassification":51.35,"ImdbClassification":77.34,"ToxicConversationsClassification":68.2,"TweetSentimentExtractionClassification":62.71} +{"Rank":26,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":69.69,"AmazonPolarityClassification":86.69,"Banking77Classification":79.36,"EmotionClassification":48.79,"ImdbClassification":82.25,"ToxicConversationsClassification":63.9,"TweetSentimentExtractionClassification":57.14} +{"Rank":27,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":69.53,"AmazonPolarityClassification":80.68,"Banking77Classification":84.77,"EmotionClassification":47.08,"ImdbClassification":75.19,"ToxicConversationsClassification":71.85,"TweetSentimentExtractionClassification":57.61} +{"Rank":28,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":69.16,"AmazonPolarityClassification":79.05,"Banking77Classification":84.65,"EmotionClassification":46.58,"ImdbClassification":75.68,"ToxicConversationsClassification":71.81,"TweetSentimentExtractionClassification":57.17} +{"Rank":29,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":68.06,"AmazonPolarityClassification":82.47,"Banking77Classification":75.76,"EmotionClassification":44.81,"ImdbClassification":73.53,"ToxicConversationsClassification":72.04,"TweetSentimentExtractionClassification":59.73} +{"Rank":30,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":67.98,"AmazonPolarityClassification":88.61,"Banking77Classification":70.44,"EmotionClassification":42.86,"ImdbClassification":79.57,"ToxicConversationsClassification":63.59,"TweetSentimentExtractionClassification":62.79} +{"Rank":31,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.58,"AmazonPolarityClassification":85.92,"Banking77Classification":80.63,"EmotionClassification":40.55,"ImdbClassification":76.6,"ToxicConversationsClassification":66.53,"TweetSentimentExtractionClassification":55.23} +{"Rank":32,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":65.84,"AmazonPolarityClassification":73.92,"Banking77Classification":81.21,"EmotionClassification":46.33,"ImdbClassification":70.86,"ToxicConversationsClassification":68.65,"TweetSentimentExtractionClassification":54.09} +{"Rank":33,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, 
fp32)":18.12,"Average":65.73,"AmazonPolarityClassification":75.05,"Banking77Classification":82.32,"EmotionClassification":43.19,"ImdbClassification":70.8,"ToxicConversationsClassification":68.48,"TweetSentimentExtractionClassification":54.54} +{"Rank":34,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":65.47,"AmazonPolarityClassification":74.58,"Banking77Classification":82.22,"EmotionClassification":45.54,"ImdbClassification":68.15,"ToxicConversationsClassification":67.56,"TweetSentimentExtractionClassification":54.77} +{"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.42,"AmazonPolarityClassification":76.41,"Banking77Classification":81.1,"EmotionClassification":45.85,"ImdbClassification":64.58,"ToxicConversationsClassification":65.56,"TweetSentimentExtractionClassification":59.04} +{"Rank":36,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":65.42,"AmazonPolarityClassification":74.28,"Banking77Classification":79.0,"EmotionClassification":42.85,"ImdbClassification":71.92,"ToxicConversationsClassification":68.4,"TweetSentimentExtractionClassification":56.08} +{"Rank":37,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.05,"AmazonPolarityClassification":68.63,"Banking77Classification":80.02,"EmotionClassification":44.77,"ImdbClassification":67.04,"ToxicConversationsClassification":67.77,"TweetSentimentExtractionClassification":56.1} +{"Rank":38,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":63.68,"AmazonPolarityClassification":74.48,"Banking77Classification":73.55,"EmotionClassification":42.22,"ImdbClassification":69.63,"ToxicConversationsClassification":68.82,"TweetSentimentExtractionClassification":53.36} +{"Rank":39,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":63.06,"AmazonPolarityClassification":67.14,"Banking77Classification":81.7,"EmotionClassification":42.22,"ImdbClassification":71.17,"ToxicConversationsClassification":61.05,"TweetSentimentExtractionClassification":55.05} +{"Rank":40,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":62.98,"AmazonPolarityClassification":67.82,"Banking77Classification":79.26,"EmotionClassification":42.2,"ImdbClassification":65.99,"ToxicConversationsClassification":66.6,"TweetSentimentExtractionClassification":56.02} +{"Rank":41,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":62.89,"AmazonPolarityClassification":61.91,"Banking77Classification":82.05,"EmotionClassification":46.65,"ImdbClassification":65.02,"ToxicConversationsClassification":68.15,"TweetSentimentExtractionClassification":53.57} +{"Rank":42,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":62.33,"AmazonPolarityClassification":69.21,"Banking77Classification":79.77,"EmotionClassification":42.37,"ImdbClassification":60.46,"ToxicConversationsClassification":66.07,"TweetSentimentExtractionClassification":56.12} +{"Rank":43,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":61.84,"AmazonPolarityClassification":66.88,"Banking77Classification":82.35,"EmotionClassification":41.91,"ImdbClassification":60.17,"ToxicConversationsClassification":64.01,"TweetSentimentExtractionClassification":55.74} +{"Rank":44,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":61.68,"AmazonPolarityClassification":68.0,"Banking77Classification":71.48,"EmotionClassification":40.04,"ImdbClassification":61.52,"ToxicConversationsClassification":69.09,"TweetSentimentExtractionClassification":59.97} +{"Rank":45,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":61.0,"AmazonPolarityClassification":62.98,"Banking77Classification":80.4,"EmotionClassification":41.17,"ImdbClassification":59.76,"ToxicConversationsClassification":67.47,"TweetSentimentExtractionClassification":54.25} +{"Rank":46,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.63,"AmazonPolarityClassification":68.95,"Banking77Classification":69.85,"EmotionClassification":37.22,"ImdbClassification":62.04,"ToxicConversationsClassification":66.9,"TweetSentimentExtractionClassification":58.82} +{"Rank":47,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":60.5,"AmazonPolarityClassification":64.26,"Banking77Classification":80.04,"EmotionClassification":40.83,"ImdbClassification":61.76,"ToxicConversationsClassification":62.09,"TweetSentimentExtractionClassification":54.04} +{"Rank":48,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":59.53,"AmazonPolarityClassification":71.33,"Banking77Classification":63.41,"EmotionClassification":35.28,"ImdbClassification":65.35,"ToxicConversationsClassification":70.0,"TweetSentimentExtractionClassification":51.81} +{"Rank":49,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":57.28,"AmazonPolarityClassification":60.32,"Banking77Classification":67.69,"EmotionClassification":36.93,"ImdbClassification":62.57,"ToxicConversationsClassification":65.4,"TweetSentimentExtractionClassification":50.8} +{"Rank":50,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":56.87,"AmazonPolarityClassification":59.59,"Banking77Classification":67.05,"EmotionClassification":33.18,"ImdbClassification":63.98,"ToxicConversationsClassification":67.76,"TweetSentimentExtractionClassification":49.68} +{"Rank":51,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":54.86,"AmazonPolarityClassification":68.36,"Banking77Classification":59.86,"EmotionClassification":29.5,"ImdbClassification":58.36,"ToxicConversationsClassification":57.77,"TweetSentimentExtractionClassification":55.3} +{"Rank":52,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":51.43,"AmazonPolarityClassification":57.77,"Banking77Classification":66.66,"EmotionClassification":24.82,"ImdbClassification":56.35,"ToxicConversationsClassification":57.44,"TweetSentimentExtractionClassification":45.52} +{"Rank":53,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, 
fp32)":0.16,"Average":50.66,"AmazonPolarityClassification":61.01,"Banking77Classification":57.76,"EmotionClassification":24.83,"ImdbClassification":57.58,"ToxicConversationsClassification":54.05,"TweetSentimentExtractionClassification":48.73} +{"Rank":54,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":83.21,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":55,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":56,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":57,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":58,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":59,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":60,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":61,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":62,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":63,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":64,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":65,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":66,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":67,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":68,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":69,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":70,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":71,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":72,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":73,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, 
fp32)":1.59,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":74,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":75,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":76,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":77,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":78,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":79,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":80,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":81,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":82,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":83,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":84,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":85,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":86,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":87,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":88,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":89,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":90,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":91,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":92,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":93,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":94,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":95,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":96,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":97,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":98,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":99,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, 
fp32)":2.09,"Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonPolarityClassification":null,"Banking77Classification":null,"EmotionClassification":null,"ImdbClassification":null,"ToxicConversationsClassification":null,"TweetSentimentExtractionClassification":null} diff --git a/boards_data/en/data_tasks/Clustering/default.jsonl b/boards_data/en/data_tasks/Clustering/default.jsonl index 25fba3ccffc390a768467d189f18247ecc19b8c2..1b5f3c6a468ac3e1a6cf1390446b180cabfdc123 100644 --- a/boards_data/en/data_tasks/Clustering/default.jsonl +++ b/boards_data/en/data_tasks/Clustering/default.jsonl @@ -1,205 +1,111 @@ -{"level_0":0,"index":21,"Rank":1,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":57.89,"ArxivClusteringP2P":54.44,"ArxivClusteringS2S":49.33,"BiorxivClusteringP2P":53.05,"BiorxivClusteringS2S":48.38,"MedrxivClusteringP2P":45.86,"MedrxivClusteringS2S":44.33,"RedditClustering":72.33,"RedditClusteringP2P":72.72,"StackExchangeClustering":81.32,"StackExchangeClusteringP2P":46.05,"TwentyNewsgroupsClustering":68.98} -{"level_0":1,"index":138,"Rank":2,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.69,"ArxivClusteringP2P":55.44,"ArxivClusteringS2S":50.66,"BiorxivClusteringP2P":50.68,"BiorxivClusteringS2S":46.87,"MedrxivClusteringP2P":46.87,"MedrxivClusteringS2S":44.65,"RedditClustering":72.86,"RedditClusteringP2P":75.27,"StackExchangeClustering":80.29,"StackExchangeClusteringP2P":49.57,"TwentyNewsgroupsClustering":61.43} -{"level_0":2,"index":17,"Rank":3,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":56.92,"ArxivClusteringP2P":56.46,"ArxivClusteringS2S":51.74,"BiorxivClusteringP2P":50.09,"BiorxivClusteringS2S":46.65,"MedrxivClusteringP2P":46.23,"MedrxivClusteringS2S":44.13,"RedditClustering":73.55,"RedditClusteringP2P":74.13,"StackExchangeClustering":79.86,"StackExchangeClusteringP2P":49.41,"TwentyNewsgroupsClustering":53.91} -{"level_0":3,"index":205,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.92,"ArxivClusteringP2P":56.46,"ArxivClusteringS2S":51.74,"BiorxivClusteringP2P":50.09,"BiorxivClusteringS2S":46.65,"MedrxivClusteringP2P":46.23,"MedrxivClusteringS2S":44.13,"RedditClustering":73.55,"RedditClusteringP2P":74.13,"StackExchangeClustering":79.86,"StackExchangeClusteringP2P":49.41,"TwentyNewsgroupsClustering":53.91} -{"level_0":4,"index":126,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.92,"ArxivClusteringP2P":56.46,"ArxivClusteringS2S":51.74,"BiorxivClusteringP2P":50.09,"BiorxivClusteringS2S":46.65,"MedrxivClusteringP2P":46.23,"MedrxivClusteringS2S":44.13,"RedditClustering":73.55,"RedditClusteringP2P":74.13,"StackExchangeClustering":79.86,"StackExchangeClusteringP2P":49.41,"TwentyNewsgroupsClustering":53.91} -{"level_0":5,"index":139,"Rank":6,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.7,"ArxivClusteringP2P":55.16,"ArxivClusteringS2S":49.82,"BiorxivClusteringP2P":50.68,"BiorxivClusteringS2S":45.81,"MedrxivClusteringP2P":46.32,"MedrxivClusteringS2S":44.29,"RedditClustering":71.19,"RedditClusteringP2P":74.42,"StackExchangeClustering":78.49,"StackExchangeClusteringP2P":48.9,"TwentyNewsgroupsClustering":58.57} -{"level_0":6,"index":95,"Rank":7,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.17,"ArxivClusteringP2P":54.02,"ArxivClusteringS2S":48.82,"BiorxivClusteringP2P":50.76,"BiorxivClusteringS2S":46.57,"MedrxivClusteringP2P":46.66,"MedrxivClusteringS2S":44.18,"RedditClustering":62.92,"RedditClusteringP2P":72.74,"StackExchangeClustering":76.48,"StackExchangeClusteringP2P":48.29,"TwentyNewsgroupsClustering":66.42} -{"level_0":7,"index":15,"Rank":8,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, 
fp32)":26.45,"Average":55.83,"ArxivClusteringP2P":56.4,"ArxivClusteringS2S":51.45,"BiorxivClusteringP2P":49.01,"BiorxivClusteringS2S":45.06,"MedrxivClusteringP2P":44.37,"MedrxivClusteringS2S":42.0,"RedditClustering":73.37,"RedditClusteringP2P":72.51,"StackExchangeClustering":79.07,"StackExchangeClusteringP2P":49.57,"TwentyNewsgroupsClustering":51.31} -{"level_0":8,"index":23,"Rank":9,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.65,"ArxivClusteringP2P":54.91,"ArxivClusteringS2S":50.28,"BiorxivClusteringP2P":52.64,"BiorxivClusteringS2S":49.2,"MedrxivClusteringP2P":45.81,"MedrxivClusteringS2S":44.11,"RedditClustering":56.03,"RedditClusteringP2P":65.83,"StackExchangeClustering":66.21,"StackExchangeClusteringP2P":45.74,"TwentyNewsgroupsClustering":70.44} -{"level_0":9,"index":51,"Rank":10,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"ArxivClusteringP2P":53.45,"ArxivClusteringS2S":48.82,"BiorxivClusteringP2P":48.82,"BiorxivClusteringS2S":45.07,"MedrxivClusteringP2P":42.63,"MedrxivClusteringS2S":40.97,"RedditClustering":66.44,"RedditClusteringP2P":69.56,"StackExchangeClustering":75.95,"StackExchangeClusteringP2P":46.2,"TwentyNewsgroupsClustering":59.6} -{"level_0":10,"index":6,"Rank":11,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.35,"ArxivClusteringP2P":51.81,"ArxivClusteringS2S":44.73,"BiorxivClusteringP2P":46.07,"BiorxivClusteringS2S":40.64,"MedrxivClusteringP2P":42.94,"MedrxivClusteringS2S":41.44,"RedditClustering":68.5,"RedditClusteringP2P":64.86,"StackExchangeClustering":74.16,"StackExchangeClusteringP2P":45.1,"TwentyNewsgroupsClustering":66.62} -{"level_0":11,"index":215,"Rank":12,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":52.8,"ArxivClusteringP2P":53.76,"ArxivClusteringS2S":49.59,"BiorxivClusteringP2P":48.15,"BiorxivClusteringS2S":44.74,"MedrxivClusteringP2P":39.24,"MedrxivClusteringS2S":36.98,"RedditClustering":63.2,"RedditClusteringP2P":68.01,"StackExchangeClustering":74.99,"StackExchangeClusteringP2P":42.04,"TwentyNewsgroupsClustering":60.13} -{"level_0":12,"index":9,"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":52.42,"ArxivClusteringP2P":51.95,"ArxivClusteringS2S":42.48,"BiorxivClusteringP2P":50.15,"BiorxivClusteringS2S":42.84,"MedrxivClusteringP2P":47.24,"MedrxivClusteringS2S":43.48,"RedditClustering":63.73,"RedditClusteringP2P":64.09,"StackExchangeClustering":70.71,"StackExchangeClusteringP2P":40.34,"TwentyNewsgroupsClustering":59.56} -{"level_0":13,"index":96,"Rank":14,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":51.67,"ArxivClusteringP2P":52.08,"ArxivClusteringS2S":47.38,"BiorxivClusteringP2P":43.94,"BiorxivClusteringS2S":41.14,"MedrxivClusteringP2P":40.03,"MedrxivClusteringS2S":39.0,"RedditClustering":59.9,"RedditClusteringP2P":67.64,"StackExchangeClustering":74.25,"StackExchangeClusteringP2P":46.78,"TwentyNewsgroupsClustering":56.27} -{"level_0":14,"index":58,"Rank":15,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, 
fp32)":26.49,"Average":51.42,"ArxivClusteringP2P":51.48,"ArxivClusteringS2S":47.3,"BiorxivClusteringP2P":42.73,"BiorxivClusteringS2S":39.58,"MedrxivClusteringP2P":37.84,"MedrxivClusteringS2S":36.65,"RedditClustering":61.52,"RedditClusteringP2P":68.24,"StackExchangeClustering":76.94,"StackExchangeClusteringP2P":46.04,"TwentyNewsgroupsClustering":57.3} -{"level_0":15,"index":42,"Rank":16,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":50.61,"ArxivClusteringP2P":51.67,"ArxivClusteringS2S":48.11,"BiorxivClusteringP2P":40.87,"BiorxivClusteringS2S":39.8,"MedrxivClusteringP2P":36.52,"MedrxivClusteringS2S":36.8,"RedditClustering":61.3,"RedditClusteringP2P":67.26,"StackExchangeClustering":77.33,"StackExchangeClusteringP2P":41.33,"TwentyNewsgroupsClustering":55.7} -{"level_0":16,"index":156,"Rank":17,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":50.26,"ArxivClusteringP2P":50.45,"ArxivClusteringS2S":45.5,"BiorxivClusteringP2P":43.53,"BiorxivClusteringS2S":40.24,"MedrxivClusteringP2P":38.19,"MedrxivClusteringS2S":37.45,"RedditClustering":57.71,"RedditClusteringP2P":66.49,"StackExchangeClustering":73.1,"StackExchangeClusteringP2P":45.91,"TwentyNewsgroupsClustering":54.31} -{"level_0":17,"index":43,"Rank":18,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.14,"ArxivClusteringP2P":50.72,"ArxivClusteringS2S":48.01,"BiorxivClusteringP2P":41.41,"BiorxivClusteringS2S":38.67,"MedrxivClusteringP2P":36.54,"MedrxivClusteringS2S":37.24,"RedditClustering":63.01,"RedditClusteringP2P":65.86,"StackExchangeClustering":74.41,"StackExchangeClusteringP2P":38.52,"TwentyNewsgroupsClustering":57.16} -{"level_0":18,"index":283,"Rank":19,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.01,"ArxivClusteringP2P":49.01,"ArxivClusteringS2S":44.45,"BiorxivClusteringP2P":38.03,"BiorxivClusteringS2S":36.53,"MedrxivClusteringP2P":32.7,"MedrxivClusteringS2S":31.27,"RedditClustering":67.84,"RedditClusteringP2P":67.96,"StackExchangeClustering":76.26,"StackExchangeClusteringP2P":36.88,"TwentyNewsgroupsClustering":58.14} -{"level_0":19,"index":204,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.75,"ArxivClusteringP2P":50.51,"ArxivClusteringS2S":45.01,"BiorxivClusteringP2P":43.21,"BiorxivClusteringS2S":38.82,"MedrxivClusteringP2P":39.39,"MedrxivClusteringS2S":37.9,"RedditClustering":55.82,"RedditClusteringP2P":62.09,"StackExchangeClustering":67.65,"StackExchangeClusteringP2P":46.31,"TwentyNewsgroupsClustering":49.58} -{"level_0":20,"index":16,"Rank":21,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.75,"ArxivClusteringP2P":50.51,"ArxivClusteringS2S":45.01,"BiorxivClusteringP2P":43.21,"BiorxivClusteringS2S":38.82,"MedrxivClusteringP2P":39.39,"MedrxivClusteringS2S":37.9,"RedditClustering":55.82,"RedditClusteringP2P":62.09,"StackExchangeClustering":67.65,"StackExchangeClusteringP2P":46.31,"TwentyNewsgroupsClustering":49.58} -{"level_0":21,"index":19,"Rank":22,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":47.96,"ArxivClusteringP2P":48.47,"ArxivClusteringS2S":43.39,"BiorxivClusteringP2P":40.58,"BiorxivClusteringS2S":37.94,"MedrxivClusteringP2P":35.04,"MedrxivClusteringS2S":32.94,"RedditClustering":59.76,"RedditClusteringP2P":68.03,"StackExchangeClustering":71.27,"StackExchangeClusteringP2P":39.18,"TwentyNewsgroupsClustering":50.91} -{"level_0":22,"index":219,"Rank":23,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.86,"ArxivClusteringP2P":48.78,"ArxivClusteringS2S":46.11,"BiorxivClusteringP2P":38.79,"BiorxivClusteringS2S":37.37,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":32.33,"RedditClustering":62.16,"RedditClusteringP2P":65.96,"StackExchangeClustering":71.35,"StackExchangeClusteringP2P":36.11,"TwentyNewsgroupsClustering":54.78} -{"level_0":23,"index":1,"Rank":24,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":47.48,"ArxivClusteringP2P":46.27,"ArxivClusteringS2S":38.36,"BiorxivClusteringP2P":37.87,"BiorxivClusteringS2S":35.67,"MedrxivClusteringP2P":33.11,"MedrxivClusteringS2S":31.54,"RedditClustering":65.81,"RedditClusteringP2P":66.62,"StackExchangeClustering":74.52,"StackExchangeClusteringP2P":37.63,"TwentyNewsgroupsClustering":54.87} -{"level_0":24,"index":34,"Rank":25,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.43,"ArxivClusteringP2P":49.17,"ArxivClusteringS2S":42.18,"BiorxivClusteringP2P":40.96,"BiorxivClusteringS2S":37.25,"MedrxivClusteringP2P":37.41,"MedrxivClusteringS2S":33.39,"RedditClustering":58.46,"RedditClusteringP2P":66.35,"StackExchangeClustering":68.42,"StackExchangeClusteringP2P":37.51,"TwentyNewsgroupsClustering":50.64} -{"level_0":25,"index":8,"Rank":26,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.4,"ArxivClusteringP2P":47.92,"ArxivClusteringS2S":42.42,"BiorxivClusteringP2P":38.72,"BiorxivClusteringS2S":36.6,"MedrxivClusteringP2P":34.04,"MedrxivClusteringS2S":32.81,"RedditClustering":61.56,"RedditClusteringP2P":65.35,"StackExchangeClustering":70.16,"StackExchangeClusteringP2P":38.23,"TwentyNewsgroupsClustering":53.56} -{"level_0":26,"index":161,"Rank":27,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.1,"ArxivClusteringP2P":46.4,"ArxivClusteringS2S":40.49,"BiorxivClusteringP2P":40.94,"BiorxivClusteringS2S":36.28,"MedrxivClusteringP2P":36.93,"MedrxivClusteringS2S":35.54,"RedditClustering":56.6,"RedditClusteringP2P":64.27,"StackExchangeClustering":66.85,"StackExchangeClusteringP2P":42.46,"TwentyNewsgroupsClustering":51.33} -{"level_0":27,"index":53,"Rank":28,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.93,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.05,"BiorxivClusteringP2P":39.74,"BiorxivClusteringS2S":36.57,"MedrxivClusteringP2P":33.71,"MedrxivClusteringS2S":31.81,"RedditClustering":61.38,"RedditClusteringP2P":65.29,"StackExchangeClustering":66.6,"StackExchangeClusteringP2P":36.32,"TwentyNewsgroupsClustering":52.69} -{"level_0":28,"index":253,"Rank":29,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.84,"ArxivClusteringP2P":48.62,"ArxivClusteringS2S":43.36,"BiorxivClusteringP2P":39.11,"BiorxivClusteringS2S":36.85,"MedrxivClusteringP2P":33.39,"MedrxivClusteringS2S":31.76,"RedditClustering":60.83,"RedditClusteringP2P":64.24,"StackExchangeClustering":67.64,"StackExchangeClusteringP2P":36.57,"TwentyNewsgroupsClustering":52.82} -{"level_0":29,"index":18,"Rank":30,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.82,"ArxivClusteringP2P":47.51,"ArxivClusteringS2S":42.05,"BiorxivClusteringP2P":40.32,"BiorxivClusteringS2S":37.55,"MedrxivClusteringP2P":34.6,"MedrxivClusteringS2S":32.27,"RedditClustering":58.61,"RedditClusteringP2P":66.87,"StackExchangeClustering":68.93,"StackExchangeClusteringP2P":37.6,"TwentyNewsgroupsClustering":48.75} -{"level_0":30,"index":108,"Rank":31,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.73,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.09,"BiorxivClusteringP2P":39.38,"BiorxivClusteringS2S":37.23,"MedrxivClusteringP2P":33.22,"MedrxivClusteringS2S":31.18,"RedditClustering":60.52,"RedditClusteringP2P":65.35,"StackExchangeClustering":66.54,"StackExchangeClusteringP2P":36.72,"TwentyNewsgroupsClustering":51.72} -{"level_0":31,"index":165,"Rank":32,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.73,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.09,"BiorxivClusteringP2P":39.38,"BiorxivClusteringS2S":37.23,"MedrxivClusteringP2P":33.22,"MedrxivClusteringS2S":31.18,"RedditClustering":60.52,"RedditClusteringP2P":65.35,"StackExchangeClustering":66.54,"StackExchangeClusteringP2P":36.72,"TwentyNewsgroupsClustering":51.72} -{"level_0":32,"index":111,"Rank":33,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.73,"ArxivClusteringP2P":49.03,"ArxivClusteringS2S":43.09,"BiorxivClusteringP2P":39.38,"BiorxivClusteringS2S":37.23,"MedrxivClusteringP2P":33.22,"MedrxivClusteringS2S":31.18,"RedditClustering":60.52,"RedditClusteringP2P":65.35,"StackExchangeClustering":66.54,"StackExchangeClusteringP2P":36.72,"TwentyNewsgroupsClustering":51.72} -{"level_0":33,"index":194,"Rank":34,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.71,"ArxivClusteringP2P":48.97,"ArxivClusteringS2S":42.98,"BiorxivClusteringP2P":39.92,"BiorxivClusteringS2S":36.73,"MedrxivClusteringP2P":33.44,"MedrxivClusteringS2S":31.66,"RedditClustering":60.22,"RedditClusteringP2P":65.29,"StackExchangeClustering":65.62,"StackExchangeClusteringP2P":35.78,"TwentyNewsgroupsClustering":53.21} -{"level_0":34,"index":133,"Rank":35,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.71,"ArxivClusteringP2P":48.97,"ArxivClusteringS2S":42.98,"BiorxivClusteringP2P":39.92,"BiorxivClusteringS2S":36.73,"MedrxivClusteringP2P":33.44,"MedrxivClusteringS2S":31.66,"RedditClustering":60.22,"RedditClusteringP2P":65.29,"StackExchangeClustering":65.62,"StackExchangeClusteringP2P":35.78,"TwentyNewsgroupsClustering":53.21} -{"level_0":35,"index":284,"Rank":36,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.65,"ArxivClusteringP2P":46.57,"ArxivClusteringS2S":39.35,"BiorxivClusteringP2P":37.77,"BiorxivClusteringS2S":34.68,"MedrxivClusteringP2P":32.77,"MedrxivClusteringS2S":31.85,"RedditClustering":64.09,"RedditClusteringP2P":65.12,"StackExchangeClustering":72.05,"StackExchangeClusteringP2P":34.04,"TwentyNewsgroupsClustering":54.81} -{"level_0":36,"index":170,"Rank":37,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.61,"ArxivClusteringP2P":48.73,"ArxivClusteringS2S":42.87,"BiorxivClusteringP2P":39.73,"BiorxivClusteringS2S":37.24,"MedrxivClusteringP2P":34.31,"MedrxivClusteringS2S":32.18,"RedditClustering":59.43,"RedditClusteringP2P":64.54,"StackExchangeClustering":65.08,"StackExchangeClusteringP2P":35.52,"TwentyNewsgroupsClustering":53.12} -{"level_0":37,"index":36,"Rank":38,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.6,"ArxivClusteringP2P":48.16,"ArxivClusteringS2S":40.79,"BiorxivClusteringP2P":40.5,"BiorxivClusteringS2S":36.91,"MedrxivClusteringP2P":36.18,"MedrxivClusteringS2S":33.44,"RedditClustering":58.11,"RedditClusteringP2P":65.02,"StackExchangeClustering":68.12,"StackExchangeClusteringP2P":35.22,"TwentyNewsgroupsClustering":50.14} -{"level_0":38,"index":117,"Rank":39,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.55,"ArxivClusteringP2P":48.5,"ArxivClusteringS2S":42.58,"BiorxivClusteringP2P":39.34,"BiorxivClusteringS2S":36.18,"MedrxivClusteringP2P":34.48,"MedrxivClusteringS2S":32.26,"RedditClustering":60.62,"RedditClusteringP2P":63.29,"StackExchangeClustering":66.47,"StackExchangeClusteringP2P":36.4,"TwentyNewsgroupsClustering":51.91} -{"level_0":39,"index":197,"Rank":40,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.46,"ArxivClusteringP2P":48.66,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.81,"BiorxivClusteringS2S":36.63,"MedrxivClusteringP2P":33.63,"MedrxivClusteringS2S":31.7,"RedditClustering":59.77,"RedditClusteringP2P":63.79,"StackExchangeClustering":66.31,"StackExchangeClusteringP2P":34.99,"TwentyNewsgroupsClustering":52.98} -{"level_0":40,"index":261,"Rank":41,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.46,"ArxivClusteringP2P":48.66,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.81,"BiorxivClusteringS2S":36.63,"MedrxivClusteringP2P":33.63,"MedrxivClusteringS2S":31.7,"RedditClustering":59.77,"RedditClusteringP2P":63.79,"StackExchangeClustering":66.31,"StackExchangeClusteringP2P":34.99,"TwentyNewsgroupsClustering":52.98} -{"level_0":41,"index":62,"Rank":42,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":46.45,"ArxivClusteringP2P":44.27,"ArxivClusteringS2S":46.85,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":30.71,"MedrxivClusteringS2S":32.96,"RedditClustering":61.72,"RedditClusteringP2P":63.98,"StackExchangeClustering":72.74,"StackExchangeClusteringP2P":32.26,"TwentyNewsgroupsClustering":56.41} -{"level_0":42,"index":178,"Rank":43,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.32,"ArxivClusteringP2P":47.02,"ArxivClusteringS2S":43.52,"BiorxivClusteringP2P":35.53,"BiorxivClusteringS2S":35.34,"MedrxivClusteringP2P":30.27,"MedrxivClusteringS2S":29.67,"RedditClustering":61.77,"RedditClusteringP2P":66.01,"StackExchangeClustering":72.04,"StackExchangeClusteringP2P":35.29,"TwentyNewsgroupsClustering":53.04} -{"level_0":43,"index":282,"Rank":44,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.23,"ArxivClusteringP2P":47.05,"ArxivClusteringS2S":42.59,"BiorxivClusteringP2P":35.43,"BiorxivClusteringS2S":33.86,"MedrxivClusteringP2P":32.1,"MedrxivClusteringS2S":31.15,"RedditClustering":60.18,"RedditClusteringP2P":64.71,"StackExchangeClustering":71.23,"StackExchangeClusteringP2P":35.95,"TwentyNewsgroupsClustering":54.24} -{"level_0":44,"index":115,"Rank":45,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.21,"ArxivClusteringP2P":48.29,"ArxivClusteringS2S":42.74,"BiorxivClusteringP2P":39.04,"BiorxivClusteringS2S":36.67,"MedrxivClusteringP2P":33.92,"MedrxivClusteringS2S":32.25,"RedditClustering":59.11,"RedditClusteringP2P":62.44,"StackExchangeClustering":66.14,"StackExchangeClusteringP2P":35.59,"TwentyNewsgroupsClustering":52.16} -{"level_0":45,"index":252,"Rank":46,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.2,"ArxivClusteringP2P":48.6,"ArxivClusteringS2S":43.01,"BiorxivClusteringP2P":38.2,"BiorxivClusteringS2S":36.59,"MedrxivClusteringP2P":33.17,"MedrxivClusteringS2S":31.77,"RedditClustering":59.33,"RedditClusteringP2P":62.59,"StackExchangeClustering":66.64,"StackExchangeClusteringP2P":36.0,"TwentyNewsgroupsClustering":52.31} -{"level_0":46,"index":22,"Rank":47,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":46.08,"ArxivClusteringP2P":48.57,"ArxivClusteringS2S":43.19,"BiorxivClusteringP2P":39.71,"BiorxivClusteringS2S":36.9,"MedrxivClusteringP2P":32.56,"MedrxivClusteringS2S":31.47,"RedditClustering":57.24,"RedditClusteringP2P":64.66,"StackExchangeClustering":66.4,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":51.27} -{"level_0":47,"index":150,"Rank":48,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.08,"ArxivClusteringP2P":48.57,"ArxivClusteringS2S":43.19,"BiorxivClusteringP2P":39.71,"BiorxivClusteringS2S":36.9,"MedrxivClusteringP2P":32.56,"MedrxivClusteringS2S":31.47,"RedditClustering":57.24,"RedditClusteringP2P":64.66,"StackExchangeClustering":66.4,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":51.27} -{"level_0":48,"index":114,"Rank":49,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.08,"ArxivClusteringP2P":48.57,"ArxivClusteringS2S":43.19,"BiorxivClusteringP2P":39.71,"BiorxivClusteringS2S":36.9,"MedrxivClusteringP2P":32.56,"MedrxivClusteringS2S":31.47,"RedditClustering":57.24,"RedditClusteringP2P":64.66,"StackExchangeClustering":66.4,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":51.27} -{"level_0":49,"index":193,"Rank":50,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":46.07,"ArxivClusteringP2P":48.29,"ArxivClusteringS2S":42.34,"BiorxivClusteringP2P":39.73,"BiorxivClusteringS2S":36.95,"MedrxivClusteringP2P":33.66,"MedrxivClusteringS2S":32.22,"RedditClustering":57.93,"RedditClusteringP2P":62.47,"StackExchangeClustering":66.41,"StackExchangeClusteringP2P":35.32,"TwentyNewsgroupsClustering":51.48} -{"level_0":50,"index":281,"Rank":51,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.9,"ArxivClusteringP2P":45.01,"ArxivClusteringS2S":36.85,"BiorxivClusteringP2P":36.66,"BiorxivClusteringS2S":34.21,"MedrxivClusteringP2P":32.6,"MedrxivClusteringS2S":30.8,"RedditClustering":61.42,"RedditClusteringP2P":64.13,"StackExchangeClustering":72.22,"StackExchangeClusteringP2P":38.49,"TwentyNewsgroupsClustering":52.56} -{"level_0":51,"index":20,"Rank":52,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":45.81,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.44,"BiorxivClusteringS2S":36.62,"MedrxivClusteringP2P":33.21,"MedrxivClusteringS2S":31.68,"RedditClustering":56.61,"RedditClusteringP2P":62.66,"StackExchangeClustering":66.11,"StackExchangeClusteringP2P":35.24,"TwentyNewsgroupsClustering":50.75} -{"level_0":52,"index":120,"Rank":53,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"level_0":53,"index":181,"Rank":54,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"level_0":54,"index":182,"Rank":55,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"level_0":55,"index":179,"Rank":56,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"level_0":56,"index":180,"Rank":57,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":45.77,"ArxivClusteringP2P":48.75,"ArxivClusteringS2S":42.81,"BiorxivClusteringP2P":39.1,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":32.95,"MedrxivClusteringS2S":31.56,"RedditClustering":56.24,"RedditClusteringP2P":62.88,"StackExchangeClustering":65.95,"StackExchangeClusteringP2P":35.01,"TwentyNewsgroupsClustering":51.47} -{"level_0":57,"index":186,"Rank":58,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.58,"ArxivClusteringP2P":48.58,"ArxivClusteringS2S":43.14,"BiorxivClusteringP2P":38.84,"BiorxivClusteringS2S":36.38,"MedrxivClusteringP2P":31.96,"MedrxivClusteringS2S":30.88,"RedditClustering":57.61,"RedditClusteringP2P":64.03,"StackExchangeClustering":65.54,"StackExchangeClusteringP2P":33.98,"TwentyNewsgroupsClustering":50.41} -{"level_0":58,"index":64,"Rank":59,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":45.54,"ArxivClusteringP2P":42.81,"ArxivClusteringS2S":44.24,"BiorxivClusteringP2P":34.27,"BiorxivClusteringS2S":35.53,"MedrxivClusteringP2P":31.07,"MedrxivClusteringS2S":31.27,"RedditClustering":60.24,"RedditClusteringP2P":64.12,"StackExchangeClustering":70.73,"StackExchangeClusteringP2P":34.5,"TwentyNewsgroupsClustering":52.18} -{"level_0":59,"index":148,"Rank":60,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":45.29,"ArxivClusteringP2P":43.16,"ArxivClusteringS2S":32.56,"BiorxivClusteringP2P":37.62,"BiorxivClusteringS2S":31.33,"MedrxivClusteringP2P":34.22,"MedrxivClusteringS2S":32.0,"RedditClustering":63.65,"RedditClusteringP2P":64.63,"StackExchangeClustering":68.78,"StackExchangeClusteringP2P":36.15,"TwentyNewsgroupsClustering":54.13} -{"level_0":60,"index":60,"Rank":61,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":45.24,"ArxivClusteringP2P":43.14,"ArxivClusteringS2S":42.38,"BiorxivClusteringP2P":35.88,"BiorxivClusteringS2S":34.81,"MedrxivClusteringP2P":32.23,"MedrxivClusteringS2S":31.37,"RedditClustering":61.1,"RedditClusteringP2P":64.52,"StackExchangeClustering":67.98,"StackExchangeClusteringP2P":33.2,"TwentyNewsgroupsClustering":51.04} -{"level_0":61,"index":0,"Rank":62,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":45.07,"ArxivClusteringP2P":44.12,"ArxivClusteringS2S":36.54,"BiorxivClusteringP2P":36.28,"BiorxivClusteringS2S":33.09,"MedrxivClusteringP2P":32.08,"MedrxivClusteringS2S":30.84,"RedditClustering":62.24,"RedditClusteringP2P":63.7,"StackExchangeClustering":70.19,"StackExchangeClusteringP2P":36.1,"TwentyNewsgroupsClustering":50.6} -{"level_0":62,"index":198,"Rank":63,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.06,"ArxivClusteringP2P":48.5,"ArxivClusteringS2S":42.01,"BiorxivClusteringP2P":39.3,"BiorxivClusteringS2S":35.65,"MedrxivClusteringP2P":32.8,"MedrxivClusteringS2S":30.96,"RedditClustering":55.69,"RedditClusteringP2P":62.33,"StackExchangeClustering":64.81,"StackExchangeClusteringP2P":34.08,"TwentyNewsgroupsClustering":49.5} -{"level_0":63,"index":125,"Rank":64,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":44.95,"ArxivClusteringP2P":47.78,"ArxivClusteringS2S":40.13,"BiorxivClusteringP2P":39.4,"BiorxivClusteringS2S":35.1,"MedrxivClusteringP2P":34.71,"MedrxivClusteringS2S":32.15,"RedditClustering":55.4,"RedditClusteringP2P":61.23,"StackExchangeClustering":62.05,"StackExchangeClusteringP2P":36.65,"TwentyNewsgroupsClustering":49.86} -{"level_0":64,"index":119,"Rank":65,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.95,"ArxivClusteringP2P":47.78,"ArxivClusteringS2S":40.13,"BiorxivClusteringP2P":39.4,"BiorxivClusteringS2S":35.1,"MedrxivClusteringP2P":34.71,"MedrxivClusteringS2S":32.15,"RedditClustering":55.4,"RedditClusteringP2P":61.23,"StackExchangeClustering":62.05,"StackExchangeClusteringP2P":36.65,"TwentyNewsgroupsClustering":49.86} -{"level_0":65,"index":137,"Rank":66,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.9,"ArxivClusteringP2P":47.24,"ArxivClusteringS2S":40.47,"BiorxivClusteringP2P":39.56,"BiorxivClusteringS2S":35.91,"MedrxivClusteringP2P":33.25,"MedrxivClusteringS2S":31.96,"RedditClustering":55.07,"RedditClusteringP2P":62.12,"StackExchangeClustering":64.47,"StackExchangeClusteringP2P":33.8,"TwentyNewsgroupsClustering":50.08} -{"level_0":66,"index":151,"Rank":67,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.9,"ArxivClusteringP2P":47.24,"ArxivClusteringS2S":40.47,"BiorxivClusteringP2P":39.56,"BiorxivClusteringS2S":35.91,"MedrxivClusteringP2P":33.25,"MedrxivClusteringS2S":31.96,"RedditClustering":55.07,"RedditClusteringP2P":62.12,"StackExchangeClustering":64.47,"StackExchangeClusteringP2P":33.8,"TwentyNewsgroupsClustering":50.08} -{"level_0":67,"index":254,"Rank":68,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.89,"ArxivClusteringP2P":47.9,"ArxivClusteringS2S":40.26,"BiorxivClusteringP2P":38.37,"BiorxivClusteringS2S":35.49,"MedrxivClusteringP2P":33.78,"MedrxivClusteringS2S":32.07,"RedditClustering":55.62,"RedditClusteringP2P":61.39,"StackExchangeClustering":62.63,"StackExchangeClusteringP2P":36.33,"TwentyNewsgroupsClustering":49.95} -{"level_0":68,"index":118,"Rank":69,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.82,"ArxivClusteringP2P":47.62,"ArxivClusteringS2S":39.86,"BiorxivClusteringP2P":39.22,"BiorxivClusteringS2S":35.43,"MedrxivClusteringP2P":34.5,"MedrxivClusteringS2S":32.16,"RedditClustering":55.51,"RedditClusteringP2P":60.64,"StackExchangeClustering":61.91,"StackExchangeClusteringP2P":36.16,"TwentyNewsgroupsClustering":50.01} -{"level_0":69,"index":149,"Rank":70,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":44.74,"ArxivClusteringP2P":42.45,"ArxivClusteringS2S":32.21,"BiorxivClusteringP2P":37.37,"BiorxivClusteringS2S":30.55,"MedrxivClusteringP2P":33.19,"MedrxivClusteringS2S":30.79,"RedditClustering":63.49,"RedditClusteringP2P":65.13,"StackExchangeClustering":68.44,"StackExchangeClusteringP2P":35.17,"TwentyNewsgroupsClustering":53.38} -{"level_0":70,"index":33,"Rank":71,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":44.64,"ArxivClusteringP2P":46.19,"ArxivClusteringS2S":37.58,"BiorxivClusteringP2P":38.97,"BiorxivClusteringS2S":34.52,"MedrxivClusteringP2P":37.66,"MedrxivClusteringS2S":33.54,"RedditClustering":53.5,"RedditClusteringP2P":63.59,"StackExchangeClustering":62.94,"StackExchangeClusteringP2P":36.48,"TwentyNewsgroupsClustering":46.06} -{"level_0":71,"index":93,"Rank":72,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.49,"ArxivClusteringP2P":45.55,"ArxivClusteringS2S":41.02,"BiorxivClusteringP2P":37.36,"BiorxivClusteringS2S":34.85,"MedrxivClusteringP2P":31.82,"MedrxivClusteringS2S":30.38,"RedditClustering":55.83,"RedditClusteringP2P":63.38,"StackExchangeClustering":65.92,"StackExchangeClusteringP2P":33.67,"TwentyNewsgroupsClustering":49.6} -{"level_0":72,"index":155,"Rank":73,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":44.49,"ArxivClusteringP2P":45.55,"ArxivClusteringS2S":41.02,"BiorxivClusteringP2P":37.36,"BiorxivClusteringS2S":34.85,"MedrxivClusteringP2P":31.82,"MedrxivClusteringS2S":30.38,"RedditClustering":55.83,"RedditClusteringP2P":63.38,"StackExchangeClustering":65.92,"StackExchangeClusteringP2P":33.67,"TwentyNewsgroupsClustering":49.6} -{"level_0":73,"index":213,"Rank":74,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.93,"ArxivClusteringP2P":45.69,"ArxivClusteringS2S":36.35,"BiorxivClusteringP2P":38.77,"BiorxivClusteringS2S":32.94,"MedrxivClusteringP2P":34.53,"MedrxivClusteringS2S":30.94,"RedditClustering":56.52,"RedditClusteringP2P":61.05,"StackExchangeClustering":63.19,"StackExchangeClusteringP2P":34.03,"TwentyNewsgroupsClustering":49.21} -{"level_0":74,"index":29,"Rank":75,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"level_0":75,"index":206,"Rank":76,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"level_0":76,"index":129,"Rank":77,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"level_0":77,"index":28,"Rank":78,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"level_0":78,"index":26,"Rank":79,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"level_0":79,"index":27,"Rank":80,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.91,"ArxivClusteringP2P":45.93,"ArxivClusteringS2S":36.65,"BiorxivClusteringP2P":38.49,"BiorxivClusteringS2S":32.76,"MedrxivClusteringP2P":33.74,"MedrxivClusteringS2S":30.64,"RedditClustering":56.32,"RedditClusteringP2P":61.9,"StackExchangeClustering":64.31,"StackExchangeClusteringP2P":33.26,"TwentyNewsgroupsClustering":49.06} -{"level_0":80,"index":24,"Rank":81,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":43.82,"ArxivClusteringP2P":47.4,"ArxivClusteringS2S":40.02,"BiorxivClusteringP2P":38.47,"BiorxivClusteringS2S":34.72,"MedrxivClusteringP2P":33.06,"MedrxivClusteringS2S":30.86,"RedditClustering":52.32,"RedditClusteringP2P":60.64,"StackExchangeClustering":60.78,"StackExchangeClusteringP2P":35.27,"TwentyNewsgroupsClustering":48.52} -{"level_0":81,"index":153,"Rank":82,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.8,"ArxivClusteringP2P":46.1,"ArxivClusteringS2S":39.67,"BiorxivClusteringP2P":37.5,"BiorxivClusteringS2S":32.72,"MedrxivClusteringP2P":31.46,"MedrxivClusteringS2S":28.99,"RedditClustering":56.16,"RedditClusteringP2P":63.43,"StackExchangeClustering":65.15,"StackExchangeClusteringP2P":32.5,"TwentyNewsgroupsClustering":48.16} -{"level_0":82,"index":140,"Rank":83,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.8,"ArxivClusteringP2P":46.1,"ArxivClusteringS2S":39.67,"BiorxivClusteringP2P":37.5,"BiorxivClusteringS2S":32.72,"MedrxivClusteringP2P":31.46,"MedrxivClusteringS2S":28.99,"RedditClustering":56.16,"RedditClusteringP2P":63.43,"StackExchangeClustering":65.15,"StackExchangeClusteringP2P":32.5,"TwentyNewsgroupsClustering":48.16} -{"level_0":83,"index":246,"Rank":84,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.72,"ArxivClusteringP2P":42.89,"ArxivClusteringS2S":33.47,"BiorxivClusteringP2P":36.53,"BiorxivClusteringS2S":28.66,"MedrxivClusteringP2P":32.09,"MedrxivClusteringS2S":26.82,"RedditClustering":58.99,"RedditClusteringP2P":64.46,"StackExchangeClustering":70.78,"StackExchangeClusteringP2P":35.25,"TwentyNewsgroupsClustering":50.93} -{"level_0":84,"index":211,"Rank":85,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":43.71,"ArxivClusteringP2P":45.45,"ArxivClusteringS2S":36.19,"BiorxivClusteringP2P":38.41,"BiorxivClusteringS2S":32.28,"MedrxivClusteringP2P":34.47,"MedrxivClusteringS2S":31.43,"RedditClustering":55.9,"RedditClusteringP2P":60.58,"StackExchangeClustering":62.94,"StackExchangeClusteringP2P":33.81,"TwentyNewsgroupsClustering":49.36} -{"level_0":85,"index":207,"Rank":86,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.7,"ArxivClusteringP2P":45.85,"ArxivClusteringS2S":36.13,"BiorxivClusteringP2P":38.46,"BiorxivClusteringS2S":33.14,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":30.55,"RedditClustering":56.82,"RedditClusteringP2P":61.57,"StackExchangeClustering":63.18,"StackExchangeClusteringP2P":33.51,"TwentyNewsgroupsClustering":48.72} -{"level_0":86,"index":230,"Rank":87,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.69,"ArxivClusteringP2P":48.38,"ArxivClusteringS2S":39.72,"BiorxivClusteringP2P":39.62,"BiorxivClusteringS2S":35.02,"MedrxivClusteringP2P":35.58,"MedrxivClusteringS2S":32.87,"RedditClustering":54.82,"RedditClusteringP2P":56.77,"StackExchangeClustering":53.8,"StackExchangeClusteringP2P":34.28,"TwentyNewsgroupsClustering":49.74} -{"level_0":87,"index":66,"Rank":88,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":43.57,"ArxivClusteringP2P":43.47,"ArxivClusteringS2S":39.85,"BiorxivClusteringP2P":37.1,"BiorxivClusteringS2S":34.28,"MedrxivClusteringP2P":33.55,"MedrxivClusteringS2S":31.11,"RedditClustering":53.02,"RedditClusteringP2P":60.47,"StackExchangeClustering":63.04,"StackExchangeClusteringP2P":34.01,"TwentyNewsgroupsClustering":49.37} -{"level_0":88,"index":185,"Rank":89,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.48,"ArxivClusteringP2P":44.44,"ArxivClusteringS2S":34.19,"BiorxivClusteringP2P":37.82,"BiorxivClusteringS2S":32.13,"MedrxivClusteringP2P":33.88,"MedrxivClusteringS2S":32.08,"RedditClustering":54.25,"RedditClusteringP2P":61.28,"StackExchangeClustering":64.7,"StackExchangeClusteringP2P":34.23,"TwentyNewsgroupsClustering":49.31} -{"level_0":89,"index":262,"Rank":90,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.43,"ArxivClusteringP2P":45.8,"ArxivClusteringS2S":37.64,"BiorxivClusteringP2P":38.69,"BiorxivClusteringS2S":34.45,"MedrxivClusteringP2P":32.86,"MedrxivClusteringS2S":34.02,"RedditClustering":54.76,"RedditClusteringP2P":56.28,"StackExchangeClustering":63.95,"StackExchangeClusteringP2P":32.22,"TwentyNewsgroupsClustering":47.07} -{"level_0":90,"index":154,"Rank":91,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":43.33,"ArxivClusteringP2P":46.19,"ArxivClusteringS2S":41.38,"BiorxivClusteringP2P":37.6,"BiorxivClusteringS2S":35.09,"MedrxivClusteringP2P":32.26,"MedrxivClusteringS2S":29.66,"RedditClustering":50.69,"RedditClusteringP2P":61.37,"StackExchangeClustering":64.96,"StackExchangeClusteringP2P":33.6,"TwentyNewsgroupsClustering":43.81} -{"level_0":91,"index":210,"Rank":92,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":43.16,"ArxivClusteringP2P":44.82,"ArxivClusteringS2S":35.32,"BiorxivClusteringP2P":38.19,"BiorxivClusteringS2S":31.83,"MedrxivClusteringP2P":34.08,"MedrxivClusteringS2S":30.98,"RedditClustering":54.92,"RedditClusteringP2P":60.23,"StackExchangeClustering":61.81,"StackExchangeClusteringP2P":34.03,"TwentyNewsgroupsClustering":48.56} -{"level_0":92,"index":184,"Rank":93,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.97,"ArxivClusteringP2P":45.54,"ArxivClusteringS2S":38.43,"BiorxivClusteringP2P":39.3,"BiorxivClusteringS2S":35.11,"MedrxivClusteringP2P":34.8,"MedrxivClusteringS2S":32.78,"RedditClustering":49.24,"RedditClusteringP2P":58.82,"StackExchangeClustering":58.89,"StackExchangeClusteringP2P":33.6,"TwentyNewsgroupsClustering":46.13} -{"level_0":93,"index":199,"Rank":94,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.88,"ArxivClusteringP2P":46.5,"ArxivClusteringS2S":40.08,"BiorxivClusteringP2P":36.79,"BiorxivClusteringS2S":34.42,"MedrxivClusteringP2P":32.0,"MedrxivClusteringS2S":30.38,"RedditClustering":51.0,"RedditClusteringP2P":59.28,"StackExchangeClustering":60.92,"StackExchangeClusteringP2P":32.98,"TwentyNewsgroupsClustering":47.3} -{"level_0":94,"index":202,"Rank":95,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.63,"ArxivClusteringP2P":46.92,"ArxivClusteringS2S":39.12,"BiorxivClusteringP2P":37.43,"BiorxivClusteringS2S":33.89,"MedrxivClusteringP2P":31.9,"MedrxivClusteringS2S":30.71,"RedditClustering":50.5,"RedditClusteringP2P":60.3,"StackExchangeClustering":59.26,"StackExchangeClusteringP2P":33.76,"TwentyNewsgroupsClustering":45.09} -{"level_0":95,"index":208,"Rank":96,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.52,"ArxivClusteringP2P":45.18,"ArxivClusteringS2S":35.91,"BiorxivClusteringP2P":36.57,"BiorxivClusteringS2S":32.05,"MedrxivClusteringP2P":31.35,"MedrxivClusteringS2S":29.56,"RedditClustering":55.37,"RedditClusteringP2P":60.05,"StackExchangeClustering":62.28,"StackExchangeClusteringP2P":33.94,"TwentyNewsgroupsClustering":45.43} -{"level_0":96,"index":238,"Rank":97,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":42.42,"ArxivClusteringP2P":37.9,"ArxivClusteringS2S":32.39,"BiorxivClusteringP2P":30.48,"BiorxivClusteringS2S":27.5,"MedrxivClusteringP2P":29.12,"MedrxivClusteringS2S":27.56,"RedditClustering":64.13,"RedditClusteringP2P":62.84,"StackExchangeClustering":71.43,"StackExchangeClusteringP2P":32.85,"TwentyNewsgroupsClustering":50.44} -{"level_0":97,"index":229,"Rank":98,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":42.35,"ArxivClusteringP2P":46.55,"ArxivClusteringS2S":37.86,"BiorxivClusteringP2P":38.48,"BiorxivClusteringS2S":33.17,"MedrxivClusteringP2P":34.41,"MedrxivClusteringS2S":32.29,"RedditClustering":50.67,"RedditClusteringP2P":54.15,"StackExchangeClustering":53.36,"StackExchangeClusteringP2P":38.0,"TwentyNewsgroupsClustering":46.86} -{"level_0":98,"index":245,"Rank":99,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Average":42.34,"ArxivClusteringP2P":41.62,"ArxivClusteringS2S":31.17,"BiorxivClusteringP2P":36.43,"BiorxivClusteringS2S":26.47,"MedrxivClusteringP2P":32.3,"MedrxivClusteringS2S":26.93,"RedditClustering":57.03,"RedditClusteringP2P":62.34,"StackExchangeClustering":67.13,"StackExchangeClusteringP2P":34.79,"TwentyNewsgroupsClustering":49.53} -{"level_0":99,"index":209,"Rank":100,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":42.24,"ArxivClusteringP2P":43.87,"ArxivClusteringS2S":34.57,"BiorxivClusteringP2P":36.79,"BiorxivClusteringS2S":30.68,"MedrxivClusteringP2P":34.09,"MedrxivClusteringS2S":31.3,"RedditClustering":53.31,"RedditClusteringP2P":58.96,"StackExchangeClustering":59.92,"StackExchangeClusteringP2P":33.88,"TwentyNewsgroupsClustering":47.29} -{"level_0":100,"index":152,"Rank":101,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":42.11,"ArxivClusteringP2P":44.57,"ArxivClusteringS2S":40.48,"BiorxivClusteringP2P":36.19,"BiorxivClusteringS2S":32.72,"MedrxivClusteringP2P":31.53,"MedrxivClusteringS2S":28.29,"RedditClustering":48.18,"RedditClusteringP2P":62.19,"StackExchangeClustering":63.91,"StackExchangeClusteringP2P":32.56,"TwentyNewsgroupsClustering":42.58} -{"level_0":101,"index":107,"Rank":102,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.09,"ArxivClusteringP2P":46.64,"ArxivClusteringS2S":36.01,"BiorxivClusteringP2P":39.19,"BiorxivClusteringS2S":32.13,"MedrxivClusteringP2P":34.11,"MedrxivClusteringS2S":31.67,"RedditClustering":50.19,"RedditClusteringP2P":56.49,"StackExchangeClustering":57.54,"StackExchangeClusteringP2P":35.74,"TwentyNewsgroupsClustering":43.27} -{"level_0":102,"index":63,"Rank":103,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":41.99,"ArxivClusteringP2P":49.22,"ArxivClusteringS2S":41.71,"BiorxivClusteringP2P":38.39,"BiorxivClusteringS2S":31.31,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":27.87,"RedditClustering":43.67,"RedditClusteringP2P":61.67,"StackExchangeClustering":68.2,"StackExchangeClusteringP2P":36.36,"TwentyNewsgroupsClustering":32.01} -{"level_0":103,"index":35,"Rank":104,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.98,"ArxivClusteringP2P":44.21,"ArxivClusteringS2S":33.91,"BiorxivClusteringP2P":38.68,"BiorxivClusteringS2S":32.21,"MedrxivClusteringP2P":35.18,"MedrxivClusteringS2S":30.99,"RedditClustering":49.5,"RedditClusteringP2P":60.21,"StackExchangeClustering":60.85,"StackExchangeClusteringP2P":33.97,"TwentyNewsgroupsClustering":42.09} -{"level_0":104,"index":147,"Rank":105,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.9,"ArxivClusteringP2P":39.68,"ArxivClusteringS2S":29.19,"BiorxivClusteringP2P":32.98,"BiorxivClusteringS2S":25.72,"MedrxivClusteringP2P":30.89,"MedrxivClusteringS2S":28.38,"RedditClustering":59.26,"RedditClusteringP2P":63.22,"StackExchangeClustering":65.04,"StackExchangeClusteringP2P":35.28,"TwentyNewsgroupsClustering":51.31} -{"level_0":105,"index":100,"Rank":106,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":41.87,"ArxivClusteringP2P":47.24,"ArxivClusteringS2S":37.43,"BiorxivClusteringP2P":36.97,"BiorxivClusteringS2S":31.98,"MedrxivClusteringP2P":31.5,"MedrxivClusteringS2S":28.41,"RedditClustering":47.53,"RedditClusteringP2P":62.76,"StackExchangeClustering":59.55,"StackExchangeClusteringP2P":39.43,"TwentyNewsgroupsClustering":37.73} -{"level_0":106,"index":228,"Rank":107,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":41.81,"ArxivClusteringP2P":46.07,"ArxivClusteringS2S":37.5,"BiorxivClusteringP2P":36.99,"BiorxivClusteringS2S":33.21,"MedrxivClusteringP2P":34.25,"MedrxivClusteringS2S":32.24,"RedditClustering":51.18,"RedditClusteringP2P":54.8,"StackExchangeClustering":53.05,"StackExchangeClusteringP2P":33.13,"TwentyNewsgroupsClustering":47.47} -{"level_0":107,"index":175,"Rank":108,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":41.73,"ArxivClusteringP2P":45.39,"ArxivClusteringS2S":36.68,"BiorxivClusteringP2P":37.05,"BiorxivClusteringS2S":30.16,"MedrxivClusteringP2P":32.41,"MedrxivClusteringS2S":28.09,"RedditClustering":53.05,"RedditClusteringP2P":60.31,"StackExchangeClustering":58.52,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":42.46} -{"level_0":108,"index":135,"Rank":109,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.73,"ArxivClusteringP2P":45.39,"ArxivClusteringS2S":36.68,"BiorxivClusteringP2P":37.05,"BiorxivClusteringS2S":30.16,"MedrxivClusteringP2P":32.41,"MedrxivClusteringS2S":28.09,"RedditClustering":53.05,"RedditClusteringP2P":60.31,"StackExchangeClustering":58.52,"StackExchangeClusteringP2P":34.96,"TwentyNewsgroupsClustering":42.46} -{"level_0":109,"index":244,"Rank":110,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":41.65,"ArxivClusteringP2P":41.62,"ArxivClusteringS2S":29.44,"BiorxivClusteringP2P":35.99,"BiorxivClusteringS2S":24.02,"MedrxivClusteringP2P":32.4,"MedrxivClusteringS2S":26.33,"RedditClustering":54.53,"RedditClusteringP2P":62.5,"StackExchangeClustering":65.11,"StackExchangeClusteringP2P":36.86,"TwentyNewsgroupsClustering":49.33} -{"level_0":110,"index":236,"Rank":111,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":41.6,"ArxivClusteringP2P":37.5,"ArxivClusteringS2S":30.55,"BiorxivClusteringP2P":29.59,"BiorxivClusteringS2S":25.72,"MedrxivClusteringP2P":28.72,"MedrxivClusteringS2S":27.39,"RedditClustering":61.69,"RedditClusteringP2P":61.67,"StackExchangeClustering":69.93,"StackExchangeClusteringP2P":33.21,"TwentyNewsgroupsClustering":51.64} -{"level_0":111,"index":101,"Rank":112,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.51,"ArxivClusteringP2P":45.56,"ArxivClusteringS2S":35.61,"BiorxivClusteringP2P":36.16,"BiorxivClusteringS2S":30.14,"MedrxivClusteringP2P":31.12,"MedrxivClusteringS2S":26.73,"RedditClustering":50.24,"RedditClusteringP2P":61.45,"StackExchangeClustering":62.63,"StackExchangeClusteringP2P":37.17,"TwentyNewsgroupsClustering":39.83} -{"level_0":112,"index":237,"Rank":113,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Average":41.51,"ArxivClusteringP2P":37.9,"ArxivClusteringS2S":30.45,"BiorxivClusteringP2P":30.52,"BiorxivClusteringS2S":26.06,"MedrxivClusteringP2P":28.69,"MedrxivClusteringS2S":26.69,"RedditClustering":61.34,"RedditClusteringP2P":61.11,"StackExchangeClustering":69.95,"StackExchangeClusteringP2P":32.73,"TwentyNewsgroupsClustering":51.15} -{"level_0":113,"index":99,"Rank":114,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.49,"ArxivClusteringP2P":47.46,"ArxivClusteringS2S":38.29,"BiorxivClusteringP2P":37.54,"BiorxivClusteringS2S":32.65,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":28.94,"RedditClustering":46.02,"RedditClusteringP2P":62.43,"StackExchangeClustering":57.86,"StackExchangeClusteringP2P":37.82,"TwentyNewsgroupsClustering":35.91} -{"level_0":114,"index":121,"Rank":115,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.49,"ArxivClusteringP2P":47.46,"ArxivClusteringS2S":38.29,"BiorxivClusteringP2P":37.54,"BiorxivClusteringS2S":32.65,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":28.94,"RedditClustering":46.02,"RedditClusteringP2P":62.43,"StackExchangeClustering":57.86,"StackExchangeClusteringP2P":37.82,"TwentyNewsgroupsClustering":35.91} -{"level_0":115,"index":214,"Rank":116,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.1,"ArxivClusteringP2P":42.61,"ArxivClusteringS2S":32.32,"BiorxivClusteringP2P":34.97,"BiorxivClusteringS2S":29.08,"MedrxivClusteringP2P":31.19,"MedrxivClusteringS2S":27.27,"RedditClustering":54.89,"RedditClusteringP2P":57.58,"StackExchangeClustering":63.15,"StackExchangeClusteringP2P":32.25,"TwentyNewsgroupsClustering":46.82} -{"level_0":116,"index":160,"Rank":117,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":41.06,"ArxivClusteringP2P":44.31,"ArxivClusteringS2S":38.43,"BiorxivClusteringP2P":35.34,"BiorxivClusteringS2S":33.5,"MedrxivClusteringP2P":31.48,"MedrxivClusteringS2S":29.71,"RedditClustering":46.54,"RedditClusteringP2P":63.22,"StackExchangeClustering":57.53,"StackExchangeClusteringP2P":32.69,"TwentyNewsgroupsClustering":38.91} -{"level_0":117,"index":44,"Rank":118,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.9,"ArxivClusteringP2P":42.32,"ArxivClusteringS2S":31.28,"BiorxivClusteringP2P":37.42,"BiorxivClusteringS2S":29.32,"MedrxivClusteringP2P":34.68,"MedrxivClusteringS2S":30.34,"RedditClustering":50.76,"RedditClusteringP2P":55.02,"StackExchangeClustering":59.66,"StackExchangeClusteringP2P":32.11,"TwentyNewsgroupsClustering":47.01} -{"level_0":118,"index":103,"Rank":119,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.88,"ArxivClusteringP2P":44.94,"ArxivClusteringS2S":35.87,"BiorxivClusteringP2P":35.68,"BiorxivClusteringS2S":30.47,"MedrxivClusteringP2P":30.79,"MedrxivClusteringS2S":27.95,"RedditClustering":50.47,"RedditClusteringP2P":60.54,"StackExchangeClustering":60.7,"StackExchangeClusteringP2P":33.98,"TwentyNewsgroupsClustering":38.28} -{"level_0":119,"index":183,"Rank":120,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":40.86,"ArxivClusteringP2P":47.03,"ArxivClusteringS2S":38.38,"BiorxivClusteringP2P":38.98,"BiorxivClusteringS2S":34.94,"MedrxivClusteringP2P":33.98,"MedrxivClusteringS2S":31.67,"RedditClustering":42.72,"RedditClusteringP2P":56.93,"StackExchangeClustering":53.6,"StackExchangeClusteringP2P":32.87,"TwentyNewsgroupsClustering":38.38} -{"level_0":120,"index":61,"Rank":121,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":40.83,"ArxivClusteringP2P":47.81,"ArxivClusteringS2S":40.53,"BiorxivClusteringP2P":38.12,"BiorxivClusteringS2S":31.25,"MedrxivClusteringP2P":30.94,"MedrxivClusteringS2S":28.04,"RedditClustering":42.84,"RedditClusteringP2P":60.1,"StackExchangeClustering":65.12,"StackExchangeClusteringP2P":33.61,"TwentyNewsgroupsClustering":30.76} -{"level_0":121,"index":132,"Rank":122,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.83,"ArxivClusteringP2P":42.9,"ArxivClusteringS2S":32.45,"BiorxivClusteringP2P":37.24,"BiorxivClusteringS2S":29.75,"MedrxivClusteringP2P":36.65,"MedrxivClusteringS2S":32.57,"RedditClustering":50.62,"RedditClusteringP2P":54.44,"StackExchangeClustering":54.26,"StackExchangeClusteringP2P":32.52,"TwentyNewsgroupsClustering":45.7} -{"level_0":122,"index":169,"Rank":123,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.81,"ArxivClusteringP2P":45.66,"ArxivClusteringS2S":38.42,"BiorxivClusteringP2P":35.54,"BiorxivClusteringS2S":32.19,"MedrxivClusteringP2P":30.95,"MedrxivClusteringS2S":28.87,"RedditClustering":47.9,"RedditClusteringP2P":55.95,"StackExchangeClustering":60.27,"StackExchangeClusteringP2P":32.34,"TwentyNewsgroupsClustering":40.81} -{"level_0":123,"index":167,"Rank":124,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.74,"ArxivClusteringP2P":45.37,"ArxivClusteringS2S":36.52,"BiorxivClusteringP2P":36.52,"BiorxivClusteringS2S":31.98,"MedrxivClusteringP2P":31.71,"MedrxivClusteringS2S":28.8,"RedditClustering":49.71,"RedditClusteringP2P":55.07,"StackExchangeClustering":59.42,"StackExchangeClusteringP2P":31.7,"TwentyNewsgroupsClustering":41.29} -{"level_0":124,"index":69,"Rank":125,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.7,"ArxivClusteringP2P":46.39,"ArxivClusteringS2S":35.41,"BiorxivClusteringP2P":38.98,"BiorxivClusteringS2S":31.74,"MedrxivClusteringP2P":33.13,"MedrxivClusteringS2S":30.29,"RedditClustering":47.53,"RedditClusteringP2P":56.03,"StackExchangeClustering":53.87,"StackExchangeClusteringP2P":33.57,"TwentyNewsgroupsClustering":40.77} -{"level_0":125,"index":65,"Rank":126,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":40.63,"ArxivClusteringP2P":47.56,"ArxivClusteringS2S":39.92,"BiorxivClusteringP2P":36.14,"BiorxivClusteringS2S":30.26,"MedrxivClusteringP2P":30.11,"MedrxivClusteringS2S":26.93,"RedditClustering":41.83,"RedditClusteringP2P":62.08,"StackExchangeClustering":67.34,"StackExchangeClusteringP2P":34.5,"TwentyNewsgroupsClustering":30.26} -{"level_0":126,"index":267,"Rank":127,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":40.54,"ArxivClusteringP2P":43.24,"ArxivClusteringS2S":36.49,"BiorxivClusteringP2P":35.56,"BiorxivClusteringS2S":31.02,"MedrxivClusteringP2P":31.7,"MedrxivClusteringS2S":27.76,"RedditClustering":49.16,"RedditClusteringP2P":61.55,"StackExchangeClustering":56.77,"StackExchangeClusteringP2P":32.04,"TwentyNewsgroupsClustering":40.7} -{"level_0":127,"index":112,"Rank":128,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.4,"ArxivClusteringP2P":41.78,"ArxivClusteringS2S":34.13,"BiorxivClusteringP2P":35.01,"BiorxivClusteringS2S":31.0,"MedrxivClusteringP2P":29.71,"MedrxivClusteringS2S":28.0,"RedditClustering":49.53,"RedditClusteringP2P":59.71,"StackExchangeClustering":60.73,"StackExchangeClusteringP2P":34.64,"TwentyNewsgroupsClustering":40.12} -{"level_0":128,"index":83,"Rank":129,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.34,"ArxivClusteringP2P":45.59,"ArxivClusteringS2S":38.86,"BiorxivClusteringP2P":36.55,"BiorxivClusteringS2S":33.7,"MedrxivClusteringP2P":31.51,"MedrxivClusteringS2S":28.76,"RedditClustering":40.45,"RedditClusteringP2P":55.75,"StackExchangeClustering":59.21,"StackExchangeClusteringP2P":33.95,"TwentyNewsgroupsClustering":39.46} -{"level_0":129,"index":212,"Rank":130,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":40.31,"ArxivClusteringP2P":41.8,"ArxivClusteringS2S":32.41,"BiorxivClusteringP2P":34.81,"BiorxivClusteringS2S":28.59,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":29.91,"RedditClustering":50.31,"RedditClusteringP2P":56.57,"StackExchangeClustering":57.99,"StackExchangeClusteringP2P":33.64,"TwentyNewsgroupsClustering":44.61} -{"level_0":130,"index":243,"Rank":131,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.21,"ArxivClusteringP2P":39.28,"ArxivClusteringS2S":27.26,"BiorxivClusteringP2P":33.99,"BiorxivClusteringS2S":22.92,"MedrxivClusteringP2P":33.2,"MedrxivClusteringS2S":26.13,"RedditClustering":52.93,"RedditClusteringP2P":59.67,"StackExchangeClustering":63.13,"StackExchangeClusteringP2P":35.68,"TwentyNewsgroupsClustering":48.1} -{"level_0":131,"index":136,"Rank":132,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.08,"ArxivClusteringP2P":44.02,"ArxivClusteringS2S":35.16,"BiorxivClusteringP2P":35.57,"BiorxivClusteringS2S":29.07,"MedrxivClusteringP2P":31.86,"MedrxivClusteringS2S":27.51,"RedditClustering":49.28,"RedditClusteringP2P":57.09,"StackExchangeClustering":55.35,"StackExchangeClusteringP2P":34.42,"TwentyNewsgroupsClustering":41.57} -{"level_0":132,"index":177,"Rank":133,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.08,"ArxivClusteringP2P":44.02,"ArxivClusteringS2S":35.16,"BiorxivClusteringP2P":35.57,"BiorxivClusteringS2S":29.07,"MedrxivClusteringP2P":31.86,"MedrxivClusteringS2S":27.51,"RedditClustering":49.28,"RedditClusteringP2P":57.09,"StackExchangeClustering":55.35,"StackExchangeClusteringP2P":34.42,"TwentyNewsgroupsClustering":41.57} -{"level_0":133,"index":192,"Rank":134,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":39.94,"ArxivClusteringP2P":46.19,"ArxivClusteringS2S":36.91,"BiorxivClusteringP2P":38.59,"BiorxivClusteringS2S":32.32,"MedrxivClusteringP2P":32.55,"MedrxivClusteringS2S":29.92,"RedditClustering":45.63,"RedditClusteringP2P":53.37,"StackExchangeClustering":47.28,"StackExchangeClusteringP2P":34.25,"TwentyNewsgroupsClustering":42.38} -{"level_0":134,"index":158,"Rank":135,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.92,"ArxivClusteringP2P":42.12,"ArxivClusteringS2S":34.8,"BiorxivClusteringP2P":35.89,"BiorxivClusteringS2S":30.05,"MedrxivClusteringP2P":31.34,"MedrxivClusteringS2S":27.88,"RedditClustering":45.72,"RedditClusteringP2P":59.66,"StackExchangeClustering":58.51,"StackExchangeClusteringP2P":31.98,"TwentyNewsgroupsClustering":41.13} -{"level_0":135,"index":79,"Rank":136,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.92,"ArxivClusteringP2P":43.38,"ArxivClusteringS2S":33.71,"BiorxivClusteringP2P":35.06,"BiorxivClusteringS2S":30.71,"MedrxivClusteringP2P":32.08,"MedrxivClusteringS2S":29.45,"RedditClustering":48.23,"RedditClusteringP2P":53.18,"StackExchangeClustering":60.86,"StackExchangeClusteringP2P":32.36,"TwentyNewsgroupsClustering":40.06} -{"level_0":136,"index":82,"Rank":137,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.83,"ArxivClusteringP2P":44.72,"ArxivClusteringS2S":35.08,"BiorxivClusteringP2P":34.41,"BiorxivClusteringS2S":30.53,"MedrxivClusteringP2P":31.35,"MedrxivClusteringS2S":28.77,"RedditClustering":46.47,"RedditClusteringP2P":54.17,"StackExchangeClustering":59.19,"StackExchangeClusteringP2P":32.57,"TwentyNewsgroupsClustering":40.89} -{"level_0":137,"index":76,"Rank":138,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.54,"ArxivClusteringP2P":42.86,"ArxivClusteringS2S":32.48,"BiorxivClusteringP2P":36.83,"BiorxivClusteringS2S":29.24,"MedrxivClusteringP2P":32.55,"MedrxivClusteringS2S":30.8,"RedditClustering":48.28,"RedditClusteringP2P":53.56,"StackExchangeClustering":55.07,"StackExchangeClusteringP2P":31.92,"TwentyNewsgroupsClustering":41.37} -{"level_0":138,"index":157,"Rank":139,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":39.51,"ArxivClusteringP2P":44.14,"ArxivClusteringS2S":37.14,"BiorxivClusteringP2P":35.81,"BiorxivClusteringS2S":31.86,"MedrxivClusteringP2P":31.34,"MedrxivClusteringS2S":28.2,"RedditClustering":42.87,"RedditClusteringP2P":56.39,"StackExchangeClustering":59.08,"StackExchangeClusteringP2P":30.3,"TwentyNewsgroupsClustering":37.51} -{"level_0":139,"index":116,"Rank":140,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.48,"ArxivClusteringP2P":45.31,"ArxivClusteringS2S":35.45,"BiorxivClusteringP2P":38.38,"BiorxivClusteringS2S":31.09,"MedrxivClusteringP2P":33.35,"MedrxivClusteringS2S":29.79,"RedditClustering":44.12,"RedditClusteringP2P":48.68,"StackExchangeClustering":53.11,"StackExchangeClusteringP2P":33.91,"TwentyNewsgroupsClustering":41.13} -{"level_0":140,"index":105,"Rank":141,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":39.46,"ArxivClusteringP2P":44.6,"ArxivClusteringS2S":34.48,"BiorxivClusteringP2P":37.13,"BiorxivClusteringS2S":29.72,"MedrxivClusteringP2P":31.98,"MedrxivClusteringS2S":29.26,"RedditClustering":45.27,"RedditClusteringP2P":54.47,"StackExchangeClustering":53.11,"StackExchangeClusteringP2P":34.67,"TwentyNewsgroupsClustering":39.37} -{"level_0":141,"index":176,"Rank":142,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.29,"ArxivClusteringP2P":41.55,"ArxivClusteringS2S":32.19,"BiorxivClusteringP2P":35.17,"BiorxivClusteringS2S":28.89,"MedrxivClusteringP2P":32.19,"MedrxivClusteringS2S":28.78,"RedditClustering":48.28,"RedditClusteringP2P":56.03,"StackExchangeClustering":55.39,"StackExchangeClusteringP2P":33.57,"TwentyNewsgroupsClustering":40.2} -{"level_0":142,"index":106,"Rank":143,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.18,"ArxivClusteringP2P":44.53,"ArxivClusteringS2S":33.25,"BiorxivClusteringP2P":36.11,"BiorxivClusteringS2S":28.06,"MedrxivClusteringP2P":31.56,"MedrxivClusteringS2S":28.13,"RedditClustering":45.46,"RedditClusteringP2P":55.16,"StackExchangeClustering":54.51,"StackExchangeClusteringP2P":34.07,"TwentyNewsgroupsClustering":40.19} -{"level_0":143,"index":166,"Rank":144,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.11,"ArxivClusteringP2P":44.52,"ArxivClusteringS2S":34.45,"BiorxivClusteringP2P":35.25,"BiorxivClusteringS2S":30.71,"MedrxivClusteringP2P":29.43,"MedrxivClusteringS2S":28.87,"RedditClustering":44.52,"RedditClusteringP2P":53.61,"StackExchangeClustering":57.11,"StackExchangeClusteringP2P":31.91,"TwentyNewsgroupsClustering":39.8} -{"level_0":144,"index":174,"Rank":145,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.99,"ArxivClusteringP2P":41.43,"ArxivClusteringS2S":32.09,"BiorxivClusteringP2P":34.97,"BiorxivClusteringS2S":28.78,"MedrxivClusteringP2P":31.33,"MedrxivClusteringS2S":28.76,"RedditClustering":47.75,"RedditClusteringP2P":54.88,"StackExchangeClustering":55.38,"StackExchangeClusteringP2P":33.08,"TwentyNewsgroupsClustering":40.48} -{"level_0":145,"index":123,"Rank":146,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.93,"ArxivClusteringP2P":44.59,"ArxivClusteringS2S":38.03,"BiorxivClusteringP2P":36.03,"BiorxivClusteringS2S":32.48,"MedrxivClusteringP2P":31.05,"MedrxivClusteringS2S":29.26,"RedditClustering":35.53,"RedditClusteringP2P":54.52,"StackExchangeClustering":55.13,"StackExchangeClusteringP2P":34.31,"TwentyNewsgroupsClustering":37.28} -{"level_0":146,"index":104,"Rank":147,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.86,"ArxivClusteringP2P":43.5,"ArxivClusteringS2S":32.09,"BiorxivClusteringP2P":34.48,"BiorxivClusteringS2S":26.14,"MedrxivClusteringP2P":30.57,"MedrxivClusteringS2S":26.22,"RedditClustering":48.32,"RedditClusteringP2P":57.84,"StackExchangeClustering":57.49,"StackExchangeClusteringP2P":34.58,"TwentyNewsgroupsClustering":36.28} -{"level_0":147,"index":235,"Rank":148,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":38.63,"ArxivClusteringP2P":35.49,"ArxivClusteringS2S":27.18,"BiorxivClusteringP2P":27.66,"BiorxivClusteringS2S":23.25,"MedrxivClusteringP2P":27.57,"MedrxivClusteringS2S":25.13,"RedditClustering":56.13,"RedditClusteringP2P":58.53,"StackExchangeClustering":64.21,"StackExchangeClusteringP2P":33.01,"TwentyNewsgroupsClustering":46.72} -{"level_0":148,"index":242,"Rank":149,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":38.4,"ArxivClusteringP2P":37.78,"ArxivClusteringS2S":31.68,"BiorxivClusteringP2P":33.09,"BiorxivClusteringS2S":29.6,"MedrxivClusteringP2P":31.96,"MedrxivClusteringS2S":31.7,"RedditClustering":45.24,"RedditClusteringP2P":51.31,"StackExchangeClustering":52.98,"StackExchangeClusteringP2P":32.94,"TwentyNewsgroupsClustering":44.1} -{"level_0":149,"index":203,"Rank":150,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.37,"ArxivClusteringP2P":43.11,"ArxivClusteringS2S":34.41,"BiorxivClusteringP2P":34.7,"BiorxivClusteringS2S":28.84,"MedrxivClusteringP2P":30.49,"MedrxivClusteringS2S":28.8,"RedditClustering":42.86,"RedditClusteringP2P":54.27,"StackExchangeClustering":53.09,"StackExchangeClusteringP2P":33.15,"TwentyNewsgroupsClustering":38.35} -{"level_0":150,"index":113,"Rank":151,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.03,"ArxivClusteringP2P":43.85,"ArxivClusteringS2S":29.59,"BiorxivClusteringP2P":37.8,"BiorxivClusteringS2S":26.76,"MedrxivClusteringP2P":32.54,"MedrxivClusteringS2S":28.05,"RedditClustering":43.68,"RedditClusteringP2P":54.33,"StackExchangeClustering":51.77,"StackExchangeClusteringP2P":33.31,"TwentyNewsgroupsClustering":36.65} -{"level_0":151,"index":128,"Rank":152,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.92,"ArxivClusteringP2P":42.01,"ArxivClusteringS2S":31.41,"BiorxivClusteringP2P":34.73,"BiorxivClusteringS2S":29.28,"MedrxivClusteringP2P":31.77,"MedrxivClusteringS2S":30.94,"RedditClustering":43.09,"RedditClusteringP2P":56.54,"StackExchangeClustering":48.23,"StackExchangeClusteringP2P":34.76,"TwentyNewsgroupsClustering":34.36} -{"level_0":152,"index":159,"Rank":153,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":37.89,"ArxivClusteringP2P":40.28,"ArxivClusteringS2S":35.42,"BiorxivClusteringP2P":35.04,"BiorxivClusteringS2S":29.46,"MedrxivClusteringP2P":28.92,"MedrxivClusteringS2S":28.43,"RedditClustering":42.41,"RedditClusteringP2P":55.17,"StackExchangeClustering":55.27,"StackExchangeClusteringP2P":30.46,"TwentyNewsgroupsClustering":35.97} -{"level_0":153,"index":258,"Rank":154,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.82,"ArxivClusteringP2P":34.72,"ArxivClusteringS2S":25.27,"BiorxivClusteringP2P":28.39,"BiorxivClusteringS2S":20.52,"MedrxivClusteringP2P":30.27,"MedrxivClusteringS2S":24.58,"RedditClustering":56.93,"RedditClusteringP2P":58.95,"StackExchangeClustering":60.85,"StackExchangeClusteringP2P":33.14,"TwentyNewsgroupsClustering":42.43} -{"level_0":154,"index":239,"Rank":155,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":37.64,"ArxivClusteringP2P":36.94,"ArxivClusteringS2S":29.03,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":28.16,"MedrxivClusteringP2P":30.23,"MedrxivClusteringS2S":27.01,"RedditClustering":48.04,"RedditClusteringP2P":53.53,"StackExchangeClustering":59.54,"StackExchangeClusteringP2P":30.48,"TwentyNewsgroupsClustering":38.68} -{"level_0":155,"index":68,"Rank":156,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.6,"ArxivClusteringP2P":40.28,"ArxivClusteringS2S":31.06,"BiorxivClusteringP2P":35.18,"BiorxivClusteringS2S":28.18,"MedrxivClusteringP2P":30.4,"MedrxivClusteringS2S":28.79,"RedditClustering":45.93,"RedditClusteringP2P":51.44,"StackExchangeClustering":52.18,"StackExchangeClusteringP2P":30.67,"TwentyNewsgroupsClustering":39.44} -{"level_0":156,"index":277,"Rank":157,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.52,"ArxivClusteringP2P":41.49,"ArxivClusteringS2S":28.47,"BiorxivClusteringP2P":36.86,"BiorxivClusteringS2S":27.55,"MedrxivClusteringP2P":31.09,"MedrxivClusteringS2S":26.5,"RedditClustering":42.47,"RedditClusteringP2P":58.1,"StackExchangeClustering":53.52,"StackExchangeClusteringP2P":30.43,"TwentyNewsgroupsClustering":36.26} -{"level_0":157,"index":67,"Rank":158,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":37.45,"ArxivClusteringP2P":42.92,"ArxivClusteringS2S":35.2,"BiorxivClusteringP2P":35.02,"BiorxivClusteringS2S":27.21,"MedrxivClusteringP2P":30.15,"MedrxivClusteringS2S":26.96,"RedditClustering":38.67,"RedditClusteringP2P":53.42,"StackExchangeClustering":59.35,"StackExchangeClusteringP2P":31.47,"TwentyNewsgroupsClustering":31.54} -{"level_0":158,"index":172,"Rank":159,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.15,"ArxivClusteringP2P":40.51,"ArxivClusteringS2S":31.58,"BiorxivClusteringP2P":33.56,"BiorxivClusteringS2S":28.44,"MedrxivClusteringP2P":30.12,"MedrxivClusteringS2S":25.26,"RedditClustering":45.05,"RedditClusteringP2P":55.14,"StackExchangeClustering":45.24,"StackExchangeClusteringP2P":33.37,"TwentyNewsgroupsClustering":40.42} -{"level_0":159,"index":241,"Rank":160,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.14,"ArxivClusteringP2P":38.33,"ArxivClusteringS2S":31.55,"BiorxivClusteringP2P":33.49,"BiorxivClusteringS2S":29.44,"MedrxivClusteringP2P":31.52,"MedrxivClusteringS2S":30.87,"RedditClustering":42.02,"RedditClusteringP2P":50.73,"StackExchangeClustering":49.6,"StackExchangeClusteringP2P":31.69,"TwentyNewsgroupsClustering":39.28} -{"level_0":160,"index":162,"Rank":161,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.08,"ArxivClusteringP2P":39.22,"ArxivClusteringS2S":30.8,"BiorxivClusteringP2P":35.75,"BiorxivClusteringS2S":27.05,"MedrxivClusteringP2P":30.9,"MedrxivClusteringS2S":27.26,"RedditClustering":39.13,"RedditClusteringP2P":58.98,"StackExchangeClustering":53.52,"StackExchangeClusteringP2P":32.07,"TwentyNewsgroupsClustering":33.22} -{"level_0":161,"index":84,"Rank":162,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":36.98,"ArxivClusteringP2P":40.55,"ArxivClusteringS2S":32.49,"BiorxivClusteringP2P":33.59,"BiorxivClusteringS2S":29.13,"MedrxivClusteringP2P":30.33,"MedrxivClusteringS2S":28.02,"RedditClustering":42.17,"RedditClusteringP2P":48.02,"StackExchangeClustering":54.13,"StackExchangeClusteringP2P":31.12,"TwentyNewsgroupsClustering":37.2} -{"level_0":162,"index":168,"Rank":163,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.89,"ArxivClusteringP2P":40.27,"ArxivClusteringS2S":32.0,"BiorxivClusteringP2P":33.27,"BiorxivClusteringS2S":28.65,"MedrxivClusteringP2P":27.85,"MedrxivClusteringS2S":27.71,"RedditClustering":41.34,"RedditClusteringP2P":51.2,"StackExchangeClustering":54.51,"StackExchangeClusteringP2P":31.12,"TwentyNewsgroupsClustering":37.88} -{"level_0":163,"index":73,"Rank":164,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.96,"ArxivClusteringP2P":35.24,"ArxivClusteringS2S":31.09,"BiorxivClusteringP2P":30.2,"BiorxivClusteringS2S":27.38,"MedrxivClusteringP2P":27.17,"MedrxivClusteringS2S":27.52,"RedditClustering":45.62,"RedditClusteringP2P":47.87,"StackExchangeClustering":52.62,"StackExchangeClusteringP2P":30.05,"TwentyNewsgroupsClustering":40.79} -{"level_0":164,"index":171,"Rank":165,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.88,"ArxivClusteringP2P":39.25,"ArxivClusteringS2S":29.08,"BiorxivClusteringP2P":32.2,"BiorxivClusteringS2S":26.63,"MedrxivClusteringP2P":29.6,"MedrxivClusteringS2S":25.01,"RedditClustering":42.85,"RedditClusteringP2P":52.55,"StackExchangeClustering":48.07,"StackExchangeClusteringP2P":31.36,"TwentyNewsgroupsClustering":38.06} -{"level_0":165,"index":80,"Rank":166,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.79,"ArxivClusteringP2P":39.71,"ArxivClusteringS2S":28.24,"BiorxivClusteringP2P":33.63,"BiorxivClusteringS2S":27.04,"MedrxivClusteringP2P":31.37,"MedrxivClusteringS2S":26.87,"RedditClustering":40.23,"RedditClusteringP2P":49.09,"StackExchangeClustering":52.74,"StackExchangeClusteringP2P":32.66,"TwentyNewsgroupsClustering":32.13} -{"level_0":166,"index":77,"Rank":167,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.78,"ArxivClusteringP2P":41.21,"ArxivClusteringS2S":30.2,"BiorxivClusteringP2P":35.62,"BiorxivClusteringS2S":25.4,"MedrxivClusteringP2P":31.42,"MedrxivClusteringS2S":27.14,"RedditClustering":39.24,"RedditClusteringP2P":52.2,"StackExchangeClustering":46.43,"StackExchangeClusteringP2P":34.31,"TwentyNewsgroupsClustering":30.43} -{"level_0":167,"index":257,"Rank":168,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.71,"ArxivClusteringP2P":35.13,"ArxivClusteringS2S":23.46,"BiorxivClusteringP2P":31.17,"BiorxivClusteringS2S":18.81,"MedrxivClusteringP2P":28.88,"MedrxivClusteringS2S":23.31,"RedditClustering":49.72,"RedditClusteringP2P":57.92,"StackExchangeClustering":54.64,"StackExchangeClusteringP2P":31.58,"TwentyNewsgroupsClustering":38.23} -{"level_0":168,"index":134,"Rank":169,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":35.67,"ArxivClusteringP2P":37.75,"ArxivClusteringS2S":27.7,"BiorxivClusteringP2P":30.24,"BiorxivClusteringS2S":22.67,"MedrxivClusteringP2P":28.41,"MedrxivClusteringS2S":26.14,"RedditClustering":46.29,"RedditClusteringP2P":50.95,"StackExchangeClustering":48.08,"StackExchangeClusteringP2P":33.35,"TwentyNewsgroupsClustering":40.79} -{"level_0":169,"index":260,"Rank":170,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.06,"ArxivClusteringP2P":34.75,"ArxivClusteringS2S":22.62,"BiorxivClusteringP2P":28.75,"BiorxivClusteringS2S":20.14,"MedrxivClusteringP2P":31.2,"MedrxivClusteringS2S":25.8,"RedditClustering":46.17,"RedditClusteringP2P":56.53,"StackExchangeClustering":49.34,"StackExchangeClusteringP2P":33.4,"TwentyNewsgroupsClustering":36.93} -{"level_0":170,"index":259,"Rank":171,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.82,"ArxivClusteringP2P":33.72,"ArxivClusteringS2S":23.99,"BiorxivClusteringP2P":30.88,"BiorxivClusteringS2S":21.05,"MedrxivClusteringP2P":28.68,"MedrxivClusteringS2S":24.25,"RedditClustering":43.82,"RedditClusteringP2P":58.37,"StackExchangeClustering":47.83,"StackExchangeClusteringP2P":33.01,"TwentyNewsgroupsClustering":37.47} -{"level_0":171,"index":231,"Rank":172,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":34.06,"ArxivClusteringP2P":44.75,"ArxivClusteringS2S":35.27,"BiorxivClusteringP2P":39.52,"BiorxivClusteringS2S":34.53,"MedrxivClusteringP2P":35.04,"MedrxivClusteringS2S":31.66,"RedditClustering":24.13,"RedditClusteringP2P":35.06,"StackExchangeClustering":39.01,"StackExchangeClusteringP2P":31.46,"TwentyNewsgroupsClustering":24.22} -{"level_0":172,"index":251,"Rank":173,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.47,"ArxivClusteringP2P":35.33,"ArxivClusteringS2S":27.66,"BiorxivClusteringP2P":30.95,"BiorxivClusteringS2S":24.81,"MedrxivClusteringP2P":27.35,"MedrxivClusteringS2S":25.59,"RedditClustering":40.32,"RedditClusteringP2P":45.99,"StackExchangeClustering":48.26,"StackExchangeClusteringP2P":28.87,"TwentyNewsgroupsClustering":33.07} -{"level_0":173,"index":217,"Rank":174,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":33.43,"ArxivClusteringP2P":35.18,"ArxivClusteringS2S":27.54,"BiorxivClusteringP2P":30.15,"BiorxivClusteringS2S":24.67,"MedrxivClusteringP2P":26.25,"MedrxivClusteringS2S":24.12,"RedditClustering":40.23,"RedditClusteringP2P":47.74,"StackExchangeClustering":47.55,"StackExchangeClusteringP2P":29.45,"TwentyNewsgroupsClustering":34.86} -{"level_0":174,"index":78,"Rank":175,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.15,"ArxivClusteringP2P":37.86,"ArxivClusteringS2S":28.84,"BiorxivClusteringP2P":32.09,"BiorxivClusteringS2S":23.55,"MedrxivClusteringP2P":28.54,"MedrxivClusteringS2S":24.73,"RedditClustering":35.54,"RedditClusteringP2P":47.43,"StackExchangeClustering":46.54,"StackExchangeClusteringP2P":30.88,"TwentyNewsgroupsClustering":28.68} -{"level_0":175,"index":72,"Rank":176,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":32.91,"ArxivClusteringP2P":35.84,"ArxivClusteringS2S":27.3,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":24.06,"MedrxivClusteringP2P":28.24,"MedrxivClusteringS2S":25.34,"RedditClustering":35.92,"RedditClusteringP2P":46.08,"StackExchangeClustering":45.31,"StackExchangeClusteringP2P":29.91,"TwentyNewsgroupsClustering":31.69} -{"level_0":176,"index":173,"Rank":177,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.56,"ArxivClusteringP2P":34.17,"ArxivClusteringS2S":23.99,"BiorxivClusteringP2P":28.51,"BiorxivClusteringS2S":20.94,"MedrxivClusteringP2P":27.24,"MedrxivClusteringS2S":23.27,"RedditClustering":37.95,"RedditClusteringP2P":49.91,"StackExchangeClustering":46.35,"StackExchangeClusteringP2P":31.46,"TwentyNewsgroupsClustering":34.39} -{"level_0":177,"index":285,"Rank":178,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.28,"ArxivClusteringP2P":35.27,"ArxivClusteringS2S":23.18,"BiorxivClusteringP2P":31.13,"BiorxivClusteringS2S":26.78,"MedrxivClusteringP2P":24.65,"MedrxivClusteringS2S":24.21,"RedditClustering":38.74,"RedditClusteringP2P":51.92,"StackExchangeClustering":42.7,"StackExchangeClusteringP2P":28.7,"TwentyNewsgroupsClustering":27.82} -{"level_0":178,"index":30,"Rank":179,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.67,"ArxivClusteringP2P":33.7,"ArxivClusteringS2S":23.04,"BiorxivClusteringP2P":32.7,"BiorxivClusteringS2S":23.28,"MedrxivClusteringP2P":31.94,"MedrxivClusteringS2S":28.05,"RedditClustering":30.83,"RedditClusteringP2P":46.29,"StackExchangeClustering":39.44,"StackExchangeClusteringP2P":32.61,"TwentyNewsgroupsClustering":26.54} -{"level_0":179,"index":81,"Rank":180,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.95,"ArxivClusteringP2P":34.74,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":28.93,"BiorxivClusteringS2S":23.08,"MedrxivClusteringP2P":28.3,"MedrxivClusteringS2S":24.93,"RedditClustering":33.76,"RedditClusteringP2P":41.01,"StackExchangeClustering":44.59,"StackExchangeClusteringP2P":28.23,"TwentyNewsgroupsClustering":28.24} -{"level_0":180,"index":248,"Rank":181,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.61,"ArxivClusteringP2P":32.32,"ArxivClusteringS2S":25.5,"BiorxivClusteringP2P":28.99,"BiorxivClusteringS2S":23.2,"MedrxivClusteringP2P":29.44,"MedrxivClusteringS2S":26.16,"RedditClustering":31.25,"RedditClusteringP2P":43.3,"StackExchangeClustering":34.36,"StackExchangeClusteringP2P":30.64,"TwentyNewsgroupsClustering":31.58} -{"level_0":181,"index":70,"Rank":182,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.21,"ArxivClusteringP2P":33.04,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":30.86,"BiorxivClusteringS2S":21.1,"MedrxivClusteringP2P":27.17,"MedrxivClusteringS2S":23.78,"RedditClustering":31.45,"RedditClusteringP2P":43.69,"StackExchangeClustering":39.16,"StackExchangeClusteringP2P":29.18,"TwentyNewsgroupsClustering":28.23} -{"level_0":182,"index":122,"Rank":183,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":30.12,"ArxivClusteringP2P":35.19,"ArxivClusteringS2S":27.51,"BiorxivClusteringP2P":30.12,"BiorxivClusteringS2S":24.77,"MedrxivClusteringP2P":26.09,"MedrxivClusteringS2S":23.6,"RedditClustering":27.24,"RedditClusteringP2P":43.32,"StackExchangeClustering":43.58,"StackExchangeClusteringP2P":26.55,"TwentyNewsgroupsClustering":23.35} -{"level_0":183,"index":227,"Rank":184,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":29.55,"ArxivClusteringP2P":32.13,"ArxivClusteringS2S":22.05,"BiorxivClusteringP2P":29.84,"BiorxivClusteringS2S":20.57,"MedrxivClusteringP2P":30.13,"MedrxivClusteringS2S":24.82,"RedditClustering":28.79,"RedditClusteringP2P":49.14,"StackExchangeClustering":35.43,"StackExchangeClusteringP2P":28.83,"TwentyNewsgroupsClustering":23.28} -{"level_0":184,"index":218,"Rank":185,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.04,"ArxivClusteringP2P":32.61,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":24.9,"BiorxivClusteringS2S":19.55,"MedrxivClusteringP2P":23.6,"MedrxivClusteringS2S":21.97,"RedditClustering":32.18,"RedditClusteringP2P":45.14,"StackExchangeClustering":43.07,"StackExchangeClusteringP2P":28.5,"TwentyNewsgroupsClustering":23.21} -{"level_0":185,"index":256,"Rank":186,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.04,"ArxivClusteringP2P":38.31,"ArxivClusteringS2S":27.56,"BiorxivClusteringP2P":33.35,"BiorxivClusteringS2S":24.18,"MedrxivClusteringP2P":28.35,"MedrxivClusteringS2S":23.71,"RedditClustering":22.47,"RedditClusteringP2P":39.66,"StackExchangeClustering":32.9,"StackExchangeClusteringP2P":26.0,"TwentyNewsgroupsClustering":22.9} -{"level_0":186,"index":71,"Rank":187,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":28.01,"ArxivClusteringP2P":31.76,"ArxivClusteringS2S":21.06,"BiorxivClusteringP2P":29.84,"BiorxivClusteringS2S":18.34,"MedrxivClusteringP2P":27.42,"MedrxivClusteringS2S":22.41,"RedditClustering":26.71,"RedditClusteringP2P":44.14,"StackExchangeClustering":32.84,"StackExchangeClusteringP2P":27.48,"TwentyNewsgroupsClustering":26.09} -{"level_0":187,"index":232,"Rank":188,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":27.73,"ArxivClusteringP2P":32.56,"ArxivClusteringS2S":23.14,"BiorxivClusteringP2P":29.27,"BiorxivClusteringS2S":19.18,"MedrxivClusteringP2P":26.12,"MedrxivClusteringS2S":20.38,"RedditClustering":28.46,"RedditClusteringP2P":35.82,"StackExchangeClustering":35.8,"StackExchangeClusteringP2P":28.51,"TwentyNewsgroupsClustering":25.83} -{"level_0":188,"index":233,"Rank":189,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":26.57,"ArxivClusteringP2P":34.73,"ArxivClusteringS2S":26.01,"BiorxivClusteringP2P":29.76,"BiorxivClusteringS2S":20.71,"MedrxivClusteringP2P":26.65,"MedrxivClusteringS2S":21.5,"RedditClustering":28.84,"RedditClusteringP2P":7.37,"StackExchangeClustering":39.04,"StackExchangeClusteringP2P":30.23,"TwentyNewsgroupsClustering":27.42} -{"level_0":189,"index":141,"Rank":190,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":26.2,"ArxivClusteringP2P":26.81,"ArxivClusteringS2S":24.35,"BiorxivClusteringP2P":20.62,"BiorxivClusteringS2S":19.08,"MedrxivClusteringP2P":19.06,"MedrxivClusteringS2S":19.8,"RedditClustering":28.52,"RedditClusteringP2P":38.63,"StackExchangeClustering":46.33,"StackExchangeClusteringP2P":20.57,"TwentyNewsgroupsClustering":24.41} -{"level_0":190,"index":255,"Rank":191,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.0,"ArxivClusteringP2P":14.79,"ArxivClusteringS2S":12.25,"BiorxivClusteringP2P":13.94,"BiorxivClusteringS2S":9.79,"MedrxivClusteringP2P":15.7,"MedrxivClusteringS2S":14.89,"RedditClustering":18.38,"RedditClusteringP2P":27.1,"StackExchangeClustering":23.66,"StackExchangeClusteringP2P":27.34,"TwentyNewsgroupsClustering":20.17} -{"level_0":191,"index":11,"Rank":192,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":15.28,"ArxivClusteringP2P":17.77,"ArxivClusteringS2S":12.39,"BiorxivClusteringP2P":12.4,"BiorxivClusteringS2S":8.83,"MedrxivClusteringP2P":17.91,"MedrxivClusteringS2S":16.63,"RedditClustering":9.96,"RedditClusteringP2P":26.42,"StackExchangeClustering":15.79,"StackExchangeClusteringP2P":18.63,"TwentyNewsgroupsClustering":11.38} -{"level_0":192,"index":263,"Rank":193,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":0.41,"ArxivClusteringP2P":0.44,"ArxivClusteringS2S":0.37,"BiorxivClusteringP2P":0.36,"BiorxivClusteringS2S":0.31,"MedrxivClusteringP2P":0.32,"MedrxivClusteringS2S":0.29,"RedditClustering":0.5,"RedditClusteringP2P":0.55,"StackExchangeClustering":0.58,"StackExchangeClusteringP2P":0.3,"TwentyNewsgroupsClustering":0.46} -{"level_0":193,"index":37,"Rank":206,"Model":"openai_clip_embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":29.33,"BiorxivClusteringS2S":27.81,"MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"level_0":194,"index":74,"Rank":223,"Model":"gte-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":41.14,"ArxivClusteringS2S":31.79,"BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"level_0":195,"index":75,"Rank":224,"Model":"gte-micro-v3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":41.14,"ArxivClusteringS2S":31.79,"BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"level_0":196,"index":97,"Rank":234,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":30.15,"BiorxivClusteringP2P":"","BiorxivClusteringS2S":24.29,"MedrxivClusteringP2P":"","MedrxivClusteringS2S":23.01,"RedditClustering":21.29,"RedditClusteringP2P":"","StackExchangeClustering":35.55,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":18.35} 
-{"level_0":197,"index":98,"Rank":235,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":31.38,"MedrxivClusteringS2S":30.28,"RedditClustering":"","RedditClusteringP2P":50.51,"StackExchangeClustering":51.79,"StackExchangeClusteringP2P":30.15,"TwentyNewsgroupsClustering":41.38} -{"level_0":198,"index":124,"Rank":239,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","ArxivClusteringP2P":39.41,"ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"level_0":199,"index":234,"Rank":267,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","ArxivClusteringP2P":33.59,"ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":""} -{"level_0":200,"index":272,"Rank":279,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":26.05,"MedrxivClusteringP2P":"","MedrxivClusteringS2S":25.67,"RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":44.92} -{"level_0":201,"index":273,"Rank":280,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":"","RedditClusteringP2P":"","StackExchangeClustering":"","StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":32.92} -{"level_0":202,"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":45.64,"RedditClusteringP2P":"","StackExchangeClustering":53.01,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":42.01} -{"level_0":203,"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":40.79,"RedditClusteringP2P":"","StackExchangeClustering":55.14,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":37.64} -{"level_0":204,"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","ArxivClusteringP2P":"","ArxivClusteringS2S":"","BiorxivClusteringP2P":"","BiorxivClusteringS2S":"","MedrxivClusteringP2P":"","MedrxivClusteringS2S":"","RedditClustering":31.78,"RedditClusteringP2P":"","StackExchangeClustering":36.86,"StackExchangeClusteringP2P":"","TwentyNewsgroupsClustering":29.33} +{"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":55.83,"ArxivClusteringP2P":56.4,"ArxivClusteringS2S":51.45,"BiorxivClusteringP2P":49.01,"BiorxivClusteringS2S":45.06,"MedrxivClusteringP2P":44.37,"MedrxivClusteringS2S":42.0,"RedditClustering":73.37,"RedditClusteringP2P":72.51,"StackExchangeClustering":79.07,"StackExchangeClusteringP2P":49.57,"TwentyNewsgroupsClustering":51.31} +{"Rank":2,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.35,"ArxivClusteringP2P":51.81,"ArxivClusteringS2S":44.73,"BiorxivClusteringP2P":46.07,"BiorxivClusteringS2S":40.64,"MedrxivClusteringP2P":42.94,"MedrxivClusteringS2S":41.44,"RedditClustering":68.5,"RedditClusteringP2P":64.86,"StackExchangeClustering":74.16,"StackExchangeClusteringP2P":45.1,"TwentyNewsgroupsClustering":66.62} +{"Rank":3,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":52.42,"ArxivClusteringP2P":51.95,"ArxivClusteringS2S":42.48,"BiorxivClusteringP2P":50.15,"BiorxivClusteringS2S":42.84,"MedrxivClusteringP2P":47.24,"MedrxivClusteringS2S":43.48,"RedditClustering":63.73,"RedditClusteringP2P":64.09,"StackExchangeClustering":70.71,"StackExchangeClusteringP2P":40.34,"TwentyNewsgroupsClustering":59.56} +{"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.01,"ArxivClusteringP2P":49.01,"ArxivClusteringS2S":44.45,"BiorxivClusteringP2P":38.03,"BiorxivClusteringS2S":36.53,"MedrxivClusteringP2P":32.7,"MedrxivClusteringS2S":31.27,"RedditClustering":67.84,"RedditClusteringP2P":67.96,"StackExchangeClustering":76.26,"StackExchangeClusteringP2P":36.88,"TwentyNewsgroupsClustering":58.14} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":47.48,"ArxivClusteringP2P":46.27,"ArxivClusteringS2S":38.36,"BiorxivClusteringP2P":37.87,"BiorxivClusteringS2S":35.67,"MedrxivClusteringP2P":33.11,"MedrxivClusteringS2S":31.54,"RedditClustering":65.81,"RedditClusteringP2P":66.62,"StackExchangeClustering":74.52,"StackExchangeClusteringP2P":37.63,"TwentyNewsgroupsClustering":54.87} +{"Rank":6,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.4,"ArxivClusteringP2P":47.92,"ArxivClusteringS2S":42.42,"BiorxivClusteringP2P":38.72,"BiorxivClusteringS2S":36.6,"MedrxivClusteringP2P":34.04,"MedrxivClusteringS2S":32.81,"RedditClustering":61.56,"RedditClusteringP2P":65.35,"StackExchangeClustering":70.16,"StackExchangeClusteringP2P":38.23,"TwentyNewsgroupsClustering":53.56} +{"Rank":7,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.65,"ArxivClusteringP2P":46.57,"ArxivClusteringS2S":39.35,"BiorxivClusteringP2P":37.77,"BiorxivClusteringS2S":34.68,"MedrxivClusteringP2P":32.77,"MedrxivClusteringS2S":31.85,"RedditClustering":64.09,"RedditClusteringP2P":65.12,"StackExchangeClustering":72.05,"StackExchangeClusteringP2P":34.04,"TwentyNewsgroupsClustering":54.81} 
+{"Rank":8,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":46.45,"ArxivClusteringP2P":44.27,"ArxivClusteringS2S":46.85,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":36.7,"MedrxivClusteringP2P":30.71,"MedrxivClusteringS2S":32.96,"RedditClustering":61.72,"RedditClusteringP2P":63.98,"StackExchangeClustering":72.74,"StackExchangeClusteringP2P":32.26,"TwentyNewsgroupsClustering":56.41} +{"Rank":9,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.23,"ArxivClusteringP2P":47.05,"ArxivClusteringS2S":42.59,"BiorxivClusteringP2P":35.43,"BiorxivClusteringS2S":33.86,"MedrxivClusteringP2P":32.1,"MedrxivClusteringS2S":31.15,"RedditClustering":60.18,"RedditClusteringP2P":64.71,"StackExchangeClustering":71.23,"StackExchangeClusteringP2P":35.95,"TwentyNewsgroupsClustering":54.24} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.9,"ArxivClusteringP2P":45.01,"ArxivClusteringS2S":36.85,"BiorxivClusteringP2P":36.66,"BiorxivClusteringS2S":34.21,"MedrxivClusteringP2P":32.6,"MedrxivClusteringS2S":30.8,"RedditClustering":61.42,"RedditClusteringP2P":64.13,"StackExchangeClustering":72.22,"StackExchangeClusteringP2P":38.49,"TwentyNewsgroupsClustering":52.56} +{"Rank":11,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":45.54,"ArxivClusteringP2P":42.81,"ArxivClusteringS2S":44.24,"BiorxivClusteringP2P":34.27,"BiorxivClusteringS2S":35.53,"MedrxivClusteringP2P":31.07,"MedrxivClusteringS2S":31.27,"RedditClustering":60.24,"RedditClusteringP2P":64.12,"StackExchangeClustering":70.73,"StackExchangeClusteringP2P":34.5,"TwentyNewsgroupsClustering":52.18} +{"Rank":12,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":45.24,"ArxivClusteringP2P":43.14,"ArxivClusteringS2S":42.38,"BiorxivClusteringP2P":35.88,"BiorxivClusteringS2S":34.81,"MedrxivClusteringP2P":32.23,"MedrxivClusteringS2S":31.37,"RedditClustering":61.1,"RedditClusteringP2P":64.52,"StackExchangeClustering":67.98,"StackExchangeClusteringP2P":33.2,"TwentyNewsgroupsClustering":51.04} +{"Rank":13,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":45.07,"ArxivClusteringP2P":44.12,"ArxivClusteringS2S":36.54,"BiorxivClusteringP2P":36.28,"BiorxivClusteringS2S":33.09,"MedrxivClusteringP2P":32.08,"MedrxivClusteringS2S":30.84,"RedditClustering":62.24,"RedditClusteringP2P":63.7,"StackExchangeClustering":70.19,"StackExchangeClusteringP2P":36.1,"TwentyNewsgroupsClustering":50.6} +{"Rank":14,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.72,"ArxivClusteringP2P":42.89,"ArxivClusteringS2S":33.47,"BiorxivClusteringP2P":36.53,"BiorxivClusteringS2S":28.66,"MedrxivClusteringP2P":32.09,"MedrxivClusteringS2S":26.82,"RedditClustering":58.99,"RedditClusteringP2P":64.46,"StackExchangeClustering":70.78,"StackExchangeClusteringP2P":35.25,"TwentyNewsgroupsClustering":50.93} +{"Rank":15,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":43.71,"ArxivClusteringP2P":45.45,"ArxivClusteringS2S":36.19,"BiorxivClusteringP2P":38.41,"BiorxivClusteringS2S":32.28,"MedrxivClusteringP2P":34.47,"MedrxivClusteringS2S":31.43,"RedditClustering":55.9,"RedditClusteringP2P":60.58,"StackExchangeClustering":62.94,"StackExchangeClusteringP2P":33.81,"TwentyNewsgroupsClustering":49.36} +{"Rank":16,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.69,"ArxivClusteringP2P":48.38,"ArxivClusteringS2S":39.72,"BiorxivClusteringP2P":39.62,"BiorxivClusteringS2S":35.02,"MedrxivClusteringP2P":35.58,"MedrxivClusteringS2S":32.87,"RedditClustering":54.82,"RedditClusteringP2P":56.77,"StackExchangeClustering":53.8,"StackExchangeClusteringP2P":34.28,"TwentyNewsgroupsClustering":49.74} +{"Rank":17,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":43.57,"ArxivClusteringP2P":43.47,"ArxivClusteringS2S":39.85,"BiorxivClusteringP2P":37.1,"BiorxivClusteringS2S":34.28,"MedrxivClusteringP2P":33.55,"MedrxivClusteringS2S":31.11,"RedditClustering":53.02,"RedditClusteringP2P":60.47,"StackExchangeClustering":63.04,"StackExchangeClusteringP2P":34.01,"TwentyNewsgroupsClustering":49.37} +{"Rank":18,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":43.16,"ArxivClusteringP2P":44.82,"ArxivClusteringS2S":35.32,"BiorxivClusteringP2P":38.19,"BiorxivClusteringS2S":31.83,"MedrxivClusteringP2P":34.08,"MedrxivClusteringS2S":30.98,"RedditClustering":54.92,"RedditClusteringP2P":60.23,"StackExchangeClustering":61.81,"StackExchangeClusteringP2P":34.03,"TwentyNewsgroupsClustering":48.56} +{"Rank":19,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":42.42,"ArxivClusteringP2P":37.9,"ArxivClusteringS2S":32.39,"BiorxivClusteringP2P":30.48,"BiorxivClusteringS2S":27.5,"MedrxivClusteringP2P":29.12,"MedrxivClusteringS2S":27.56,"RedditClustering":64.13,"RedditClusteringP2P":62.84,"StackExchangeClustering":71.43,"StackExchangeClusteringP2P":32.85,"TwentyNewsgroupsClustering":50.44} +{"Rank":20,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":42.34,"ArxivClusteringP2P":41.62,"ArxivClusteringS2S":31.17,"BiorxivClusteringP2P":36.43,"BiorxivClusteringS2S":26.47,"MedrxivClusteringP2P":32.3,"MedrxivClusteringS2S":26.93,"RedditClustering":57.03,"RedditClusteringP2P":62.34,"StackExchangeClustering":67.13,"StackExchangeClusteringP2P":34.79,"TwentyNewsgroupsClustering":49.53} +{"Rank":21,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":42.24,"ArxivClusteringP2P":43.87,"ArxivClusteringS2S":34.57,"BiorxivClusteringP2P":36.79,"BiorxivClusteringS2S":30.68,"MedrxivClusteringP2P":34.09,"MedrxivClusteringS2S":31.3,"RedditClustering":53.31,"RedditClusteringP2P":58.96,"StackExchangeClustering":59.92,"StackExchangeClusteringP2P":33.88,"TwentyNewsgroupsClustering":47.29} +{"Rank":22,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, 
fp32)":27.96,"Average":41.99,"ArxivClusteringP2P":49.22,"ArxivClusteringS2S":41.71,"BiorxivClusteringP2P":38.39,"BiorxivClusteringS2S":31.31,"MedrxivClusteringP2P":31.47,"MedrxivClusteringS2S":27.87,"RedditClustering":43.67,"RedditClusteringP2P":61.67,"StackExchangeClustering":68.2,"StackExchangeClusteringP2P":36.36,"TwentyNewsgroupsClustering":32.01} +{"Rank":23,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":41.94,"ArxivClusteringP2P":46.55,"ArxivClusteringS2S":37.86,"BiorxivClusteringP2P":38.37,"BiorxivClusteringS2S":32.88,"MedrxivClusteringP2P":34.39,"MedrxivClusteringS2S":31.86,"RedditClustering":50.7,"RedditClusteringP2P":54.8,"StackExchangeClustering":53.14,"StackExchangeClusteringP2P":34.26,"TwentyNewsgroupsClustering":46.49} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":41.81,"ArxivClusteringP2P":46.07,"ArxivClusteringS2S":37.5,"BiorxivClusteringP2P":36.99,"BiorxivClusteringS2S":33.21,"MedrxivClusteringP2P":34.25,"MedrxivClusteringS2S":32.24,"RedditClustering":51.18,"RedditClusteringP2P":54.8,"StackExchangeClustering":53.05,"StackExchangeClusteringP2P":33.13,"TwentyNewsgroupsClustering":47.47} +{"Rank":25,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":41.65,"ArxivClusteringP2P":41.62,"ArxivClusteringS2S":29.44,"BiorxivClusteringP2P":35.99,"BiorxivClusteringS2S":24.02,"MedrxivClusteringP2P":32.4,"MedrxivClusteringS2S":26.33,"RedditClustering":54.53,"RedditClusteringP2P":62.5,"StackExchangeClustering":65.11,"StackExchangeClusteringP2P":36.86,"TwentyNewsgroupsClustering":49.33} +{"Rank":26,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":41.6,"ArxivClusteringP2P":37.5,"ArxivClusteringS2S":30.55,"BiorxivClusteringP2P":29.59,"BiorxivClusteringS2S":25.72,"MedrxivClusteringP2P":28.72,"MedrxivClusteringS2S":27.39,"RedditClustering":61.69,"RedditClusteringP2P":61.67,"StackExchangeClustering":69.93,"StackExchangeClusteringP2P":33.21,"TwentyNewsgroupsClustering":51.64} +{"Rank":27,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":41.51,"ArxivClusteringP2P":37.9,"ArxivClusteringS2S":30.45,"BiorxivClusteringP2P":30.52,"BiorxivClusteringS2S":26.06,"MedrxivClusteringP2P":28.69,"MedrxivClusteringS2S":26.69,"RedditClustering":61.34,"RedditClusteringP2P":61.11,"StackExchangeClustering":69.95,"StackExchangeClusteringP2P":32.73,"TwentyNewsgroupsClustering":51.15} +{"Rank":28,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.1,"ArxivClusteringP2P":42.61,"ArxivClusteringS2S":32.32,"BiorxivClusteringP2P":34.97,"BiorxivClusteringS2S":29.08,"MedrxivClusteringP2P":31.19,"MedrxivClusteringS2S":27.27,"RedditClustering":54.89,"RedditClusteringP2P":57.58,"StackExchangeClustering":63.15,"StackExchangeClusteringP2P":32.25,"TwentyNewsgroupsClustering":46.82} +{"Rank":29,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":40.83,"ArxivClusteringP2P":47.81,"ArxivClusteringS2S":40.53,"BiorxivClusteringP2P":38.12,"BiorxivClusteringS2S":31.25,"MedrxivClusteringP2P":30.94,"MedrxivClusteringS2S":28.04,"RedditClustering":42.84,"RedditClusteringP2P":60.1,"StackExchangeClustering":65.12,"StackExchangeClusteringP2P":33.61,"TwentyNewsgroupsClustering":30.76} 
+{"Rank":30,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":40.63,"ArxivClusteringP2P":47.56,"ArxivClusteringS2S":39.92,"BiorxivClusteringP2P":36.14,"BiorxivClusteringS2S":30.26,"MedrxivClusteringP2P":30.11,"MedrxivClusteringS2S":26.93,"RedditClustering":41.83,"RedditClusteringP2P":62.08,"StackExchangeClustering":67.34,"StackExchangeClusteringP2P":34.5,"TwentyNewsgroupsClustering":30.26} +{"Rank":31,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":40.31,"ArxivClusteringP2P":41.8,"ArxivClusteringS2S":32.41,"BiorxivClusteringP2P":34.81,"BiorxivClusteringS2S":28.59,"MedrxivClusteringP2P":32.73,"MedrxivClusteringS2S":29.91,"RedditClustering":50.31,"RedditClusteringP2P":56.57,"StackExchangeClustering":57.99,"StackExchangeClusteringP2P":33.64,"TwentyNewsgroupsClustering":44.61} +{"Rank":32,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.21,"ArxivClusteringP2P":39.28,"ArxivClusteringS2S":27.26,"BiorxivClusteringP2P":33.99,"BiorxivClusteringS2S":22.92,"MedrxivClusteringP2P":33.2,"MedrxivClusteringS2S":26.13,"RedditClustering":52.93,"RedditClusteringP2P":59.67,"StackExchangeClustering":63.13,"StackExchangeClusteringP2P":35.68,"TwentyNewsgroupsClustering":48.1} +{"Rank":33,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.62,"ArxivClusteringP2P":43.35,"ArxivClusteringS2S":36.0,"BiorxivClusteringP2P":37.55,"BiorxivClusteringS2S":30.33,"MedrxivClusteringP2P":30.6,"MedrxivClusteringS2S":28.73,"RedditClustering":43.15,"RedditClusteringP2P":61.69,"StackExchangeClustering":55.31,"StackExchangeClusteringP2P":33.51,"TwentyNewsgroupsClustering":35.55} +{"Rank":34,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":38.63,"ArxivClusteringP2P":35.49,"ArxivClusteringS2S":27.18,"BiorxivClusteringP2P":27.66,"BiorxivClusteringS2S":23.25,"MedrxivClusteringP2P":27.57,"MedrxivClusteringS2S":25.13,"RedditClustering":56.13,"RedditClusteringP2P":58.53,"StackExchangeClustering":64.21,"StackExchangeClusteringP2P":33.01,"TwentyNewsgroupsClustering":46.72} +{"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":38.5,"ArxivClusteringP2P":37.78,"ArxivClusteringS2S":31.68,"BiorxivClusteringP2P":33.02,"BiorxivClusteringS2S":29.45,"MedrxivClusteringP2P":31.93,"MedrxivClusteringS2S":31.53,"RedditClustering":45.65,"RedditClusteringP2P":52.05,"StackExchangeClustering":52.99,"StackExchangeClusteringP2P":33.06,"TwentyNewsgroupsClustering":44.36} +{"Rank":36,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.64,"ArxivClusteringP2P":36.94,"ArxivClusteringS2S":29.03,"BiorxivClusteringP2P":32.35,"BiorxivClusteringS2S":28.16,"MedrxivClusteringP2P":30.23,"MedrxivClusteringS2S":27.01,"RedditClustering":48.04,"RedditClusteringP2P":53.53,"StackExchangeClustering":59.54,"StackExchangeClusteringP2P":30.48,"TwentyNewsgroupsClustering":38.68} +{"Rank":37,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":37.52,"ArxivClusteringP2P":41.49,"ArxivClusteringS2S":28.47,"BiorxivClusteringP2P":36.86,"BiorxivClusteringS2S":27.55,"MedrxivClusteringP2P":31.09,"MedrxivClusteringS2S":26.5,"RedditClustering":42.47,"RedditClusteringP2P":58.1,"StackExchangeClustering":53.52,"StackExchangeClusteringP2P":30.43,"TwentyNewsgroupsClustering":36.26} +{"Rank":38,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":37.45,"ArxivClusteringP2P":42.92,"ArxivClusteringS2S":35.2,"BiorxivClusteringP2P":35.02,"BiorxivClusteringS2S":27.21,"MedrxivClusteringP2P":30.15,"MedrxivClusteringS2S":26.96,"RedditClustering":38.67,"RedditClusteringP2P":53.42,"StackExchangeClustering":59.35,"StackExchangeClusteringP2P":31.47,"TwentyNewsgroupsClustering":31.54} +{"Rank":39,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.14,"ArxivClusteringP2P":38.33,"ArxivClusteringS2S":31.55,"BiorxivClusteringP2P":33.49,"BiorxivClusteringS2S":29.44,"MedrxivClusteringP2P":31.52,"MedrxivClusteringS2S":30.87,"RedditClustering":42.02,"RedditClusteringP2P":50.73,"StackExchangeClustering":49.6,"StackExchangeClusteringP2P":31.69,"TwentyNewsgroupsClustering":39.28} +{"Rank":40,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":34.06,"ArxivClusteringP2P":44.75,"ArxivClusteringS2S":35.27,"BiorxivClusteringP2P":39.52,"BiorxivClusteringS2S":34.53,"MedrxivClusteringP2P":35.04,"MedrxivClusteringS2S":31.66,"RedditClustering":24.13,"RedditClusteringP2P":35.06,"StackExchangeClustering":39.01,"StackExchangeClusteringP2P":31.46,"TwentyNewsgroupsClustering":24.22} +{"Rank":41,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":33.43,"ArxivClusteringP2P":35.18,"ArxivClusteringS2S":27.54,"BiorxivClusteringP2P":30.15,"BiorxivClusteringS2S":24.67,"MedrxivClusteringP2P":26.25,"MedrxivClusteringS2S":24.12,"RedditClustering":40.23,"RedditClusteringP2P":47.74,"StackExchangeClustering":47.55,"StackExchangeClusteringP2P":29.45,"TwentyNewsgroupsClustering":34.86} +{"Rank":42,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.28,"ArxivClusteringP2P":35.27,"ArxivClusteringS2S":23.18,"BiorxivClusteringP2P":31.13,"BiorxivClusteringS2S":26.78,"MedrxivClusteringP2P":24.65,"MedrxivClusteringS2S":24.21,"RedditClustering":38.74,"RedditClusteringP2P":51.92,"StackExchangeClustering":42.7,"StackExchangeClusteringP2P":28.7,"TwentyNewsgroupsClustering":27.82} +{"Rank":43,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":30.12,"ArxivClusteringP2P":35.19,"ArxivClusteringS2S":27.51,"BiorxivClusteringP2P":30.12,"BiorxivClusteringS2S":24.77,"MedrxivClusteringP2P":26.09,"MedrxivClusteringS2S":23.6,"RedditClustering":27.24,"RedditClusteringP2P":43.32,"StackExchangeClustering":43.58,"StackExchangeClusteringP2P":26.55,"TwentyNewsgroupsClustering":23.35} +{"Rank":44,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Average":29.55,"ArxivClusteringP2P":32.13,"ArxivClusteringS2S":22.05,"BiorxivClusteringP2P":29.84,"BiorxivClusteringS2S":20.57,"MedrxivClusteringP2P":30.13,"MedrxivClusteringS2S":24.82,"RedditClustering":28.79,"RedditClusteringP2P":49.14,"StackExchangeClustering":35.43,"StackExchangeClusteringP2P":28.83,"TwentyNewsgroupsClustering":23.28} +{"Rank":45,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.04,"ArxivClusteringP2P":32.61,"ArxivClusteringS2S":24.68,"BiorxivClusteringP2P":24.9,"BiorxivClusteringS2S":19.55,"MedrxivClusteringP2P":23.6,"MedrxivClusteringS2S":21.97,"RedditClustering":32.18,"RedditClusteringP2P":45.14,"StackExchangeClustering":43.07,"StackExchangeClusteringP2P":28.5,"TwentyNewsgroupsClustering":23.21} +{"Rank":46,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":27.73,"ArxivClusteringP2P":32.56,"ArxivClusteringS2S":23.14,"BiorxivClusteringP2P":29.27,"BiorxivClusteringS2S":19.18,"MedrxivClusteringP2P":26.12,"MedrxivClusteringS2S":20.38,"RedditClustering":28.46,"RedditClusteringP2P":35.82,"StackExchangeClustering":35.8,"StackExchangeClusteringP2P":28.51,"TwentyNewsgroupsClustering":25.83} +{"Rank":47,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":26.57,"ArxivClusteringP2P":34.73,"ArxivClusteringS2S":26.01,"BiorxivClusteringP2P":29.76,"BiorxivClusteringS2S":20.71,"MedrxivClusteringP2P":26.65,"MedrxivClusteringS2S":21.5,"RedditClustering":28.84,"RedditClusteringP2P":7.37,"StackExchangeClustering":39.04,"StackExchangeClusteringP2P":30.23,"TwentyNewsgroupsClustering":27.42} +{"Rank":48,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":23.75,"ArxivClusteringP2P":24.83,"ArxivClusteringS2S":16.68,"BiorxivClusteringP2P":20.0,"BiorxivClusteringS2S":12.67,"MedrxivClusteringP2P":20.79,"MedrxivClusteringS2S":18.18,"RedditClustering":26.28,"RedditClusteringP2P":40.48,"StackExchangeClustering":33.51,"StackExchangeClusteringP2P":27.98,"TwentyNewsgroupsClustering":19.9} +{"Rank":49,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":15.28,"ArxivClusteringP2P":17.77,"ArxivClusteringS2S":12.39,"BiorxivClusteringP2P":12.4,"BiorxivClusteringS2S":8.83,"MedrxivClusteringP2P":17.91,"MedrxivClusteringS2S":16.63,"RedditClustering":9.96,"RedditClusteringP2P":26.42,"StackExchangeClustering":15.79,"StackExchangeClusteringP2P":18.63,"TwentyNewsgroupsClustering":11.38} +{"Rank":50,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":51,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":52,"Model":"voyage-2<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":53,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":54,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":55,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":56,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":57,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":39.44,"BiorxivClusteringS2S":36.62,"MedrxivClusteringP2P":33.21,"MedrxivClusteringS2S":31.68,"RedditClustering":56.61,"RedditClusteringP2P":62.66,"StackExchangeClustering":66.11,"StackExchangeClusteringP2P":35.24,"TwentyNewsgroupsClustering":50.75} +{"Rank":58,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":59,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":60,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":61,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":62,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":63,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":64,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":65,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":66,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":67,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} 
+{"Rank":68,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":69,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":70,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":71,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":72,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":73,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":74,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":75,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, 
fp32)":1.26,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":76,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":77,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":78,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":79,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":80,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":81,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":82,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":83,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million 
Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":84,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":37.12,"BiorxivClusteringS2S":33.41,"MedrxivClusteringP2P":31.82,"MedrxivClusteringS2S":29.68,"RedditClustering":56.54,"RedditClusteringP2P":63.23,"StackExchangeClustering":64.6,"StackExchangeClusteringP2P":33.02,"TwentyNewsgroupsClustering":49.86} +{"Rank":85,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":36.72,"BiorxivClusteringS2S":35.47,"MedrxivClusteringP2P":31.45,"MedrxivClusteringS2S":29.91,"RedditClustering":55.5,"RedditClusteringP2P":63.71,"StackExchangeClustering":65.23,"StackExchangeClusteringP2P":33.62,"TwentyNewsgroupsClustering":48.73} +{"Rank":86,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":87,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":36.1,"BiorxivClusteringS2S":31.51,"MedrxivClusteringP2P":31.31,"MedrxivClusteringS2S":28.32,"RedditClustering":43.27,"RedditClusteringP2P":57.22,"StackExchangeClustering":59.6,"StackExchangeClusteringP2P":30.82,"TwentyNewsgroupsClustering":37.65} +{"Rank":88,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":35.5,"BiorxivClusteringS2S":33.3,"MedrxivClusteringP2P":31.7,"MedrxivClusteringS2S":29.76,"RedditClustering":46.91,"RedditClusteringP2P":63.0,"StackExchangeClustering":58.37,"StackExchangeClusteringP2P":32.9,"TwentyNewsgroupsClustering":39.4} +{"Rank":89,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":35.84,"BiorxivClusteringS2S":27.35,"MedrxivClusteringP2P":30.72,"MedrxivClusteringS2S":27.0,"RedditClustering":40.12,"RedditClusteringP2P":59.49,"StackExchangeClustering":53.32,"StackExchangeClusteringP2P":31.87,"TwentyNewsgroupsClustering":33.67} +{"Rank":90,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} 
+{"Rank":91,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":92,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":93,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":94,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":95,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":96,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"ArxivClusteringP2P":33.59,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":97,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":98,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, 
fp32)":0.48,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":99,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":26.05,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":25.67,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":44.92} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":32.92} 
+{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":null,"RedditClusteringP2P":null,"StackExchangeClustering":null,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":null} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":45.64,"RedditClusteringP2P":null,"StackExchangeClustering":53.01,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":42.01} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":40.79,"RedditClusteringP2P":null,"StackExchangeClustering":55.14,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":37.64} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArxivClusteringP2P":null,"ArxivClusteringS2S":null,"BiorxivClusteringP2P":null,"BiorxivClusteringS2S":null,"MedrxivClusteringP2P":null,"MedrxivClusteringS2S":null,"RedditClustering":31.78,"RedditClusteringP2P":null,"StackExchangeClustering":36.86,"StackExchangeClusteringP2P":null,"TwentyNewsgroupsClustering":29.33} diff --git a/boards_data/en/data_tasks/PairClassification/default.jsonl b/boards_data/en/data_tasks/PairClassification/default.jsonl index eeeec4033d0dfc05603274cbec84063b4966a7f2..e270b8a6b79fef1f3a221bf62dc5f3c8b24d3bba 100644 --- a/boards_data/en/data_tasks/PairClassification/default.jsonl +++ b/boards_data/en/data_tasks/PairClassification/default.jsonl @@ -1,206 +1,111 @@ -{"level_0":0,"index":6,"Rank":1,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.24,"SprintDuplicateQuestions":94.5,"TwitterSemEval2015":86.32,"TwitterURLCorpus":86.9} -{"level_0":1,"index":96,"Rank":2,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, 
fp32)":26.49,"Average":88.54,"SprintDuplicateQuestions":96.31,"TwitterSemEval2015":81.52,"TwitterURLCorpus":87.78} -{"level_0":2,"index":58,"Rank":3,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":88.35,"SprintDuplicateQuestions":96.11,"TwitterSemEval2015":81.52,"TwitterURLCorpus":87.42} -{"level_0":3,"index":156,"Rank":4,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":88.34,"SprintDuplicateQuestions":95.66,"TwitterSemEval2015":81.62,"TwitterURLCorpus":87.75} -{"level_0":4,"index":219,"Rank":5,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.27,"SprintDuplicateQuestions":96.52,"TwitterSemEval2015":81.35,"TwitterURLCorpus":86.94} -{"level_0":5,"index":21,"Rank":6,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.14,"SprintDuplicateQuestions":97.23,"TwitterSemEval2015":79.34,"TwitterURLCorpus":87.84} -{"level_0":6,"index":95,"Rank":7,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.07,"SprintDuplicateQuestions":97.62,"TwitterSemEval2015":78.57,"TwitterURLCorpus":88.03} -{"level_0":7,"index":138,"Rank":8,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.07,"SprintDuplicateQuestions":96.04,"TwitterSemEval2015":80.58,"TwitterURLCorpus":87.58} -{"level_0":8,"index":60,"Rank":9,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":88.03,"SprintDuplicateQuestions":96.83,"TwitterSemEval2015":80.7,"TwitterURLCorpus":86.56} -{"level_0":9,"index":64,"Rank":10,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":87.99,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":80.6,"TwitterURLCorpus":86.56} -{"level_0":10,"index":62,"Rank":11,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":87.79,"SprintDuplicateQuestions":95.09,"TwitterSemEval2015":81.73,"TwitterURLCorpus":86.56} -{"level_0":11,"index":139,"Rank":12,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.74,"SprintDuplicateQuestions":95.59,"TwitterSemEval2015":80.18,"TwitterURLCorpus":87.46} -{"level_0":12,"index":51,"Rank":13,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.68,"SprintDuplicateQuestions":96.72,"TwitterSemEval2015":79.15,"TwitterURLCorpus":87.16} -{"level_0":13,"index":1,"Rank":14,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":87.61,"SprintDuplicateQuestions":96.26,"TwitterSemEval2015":79.04,"TwitterURLCorpus":87.53} -{"level_0":14,"index":204,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.51,"SprintDuplicateQuestions":95.32,"TwitterSemEval2015":79.64,"TwitterURLCorpus":87.58} -{"level_0":15,"index":16,"Rank":16,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.51,"SprintDuplicateQuestions":95.32,"TwitterSemEval2015":79.64,"TwitterURLCorpus":87.58} 
-{"level_0":16,"index":15,"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":87.38,"SprintDuplicateQuestions":95.99,"TwitterSemEval2015":79.36,"TwitterURLCorpus":86.79} -{"level_0":17,"index":186,"Rank":18,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.37,"SprintDuplicateQuestions":96.83,"TwitterSemEval2015":79.29,"TwitterURLCorpus":85.98} -{"level_0":18,"index":178,"Rank":19,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.34,"SprintDuplicateQuestions":94.59,"TwitterSemEval2015":79.93,"TwitterURLCorpus":87.5} -{"level_0":19,"index":0,"Rank":20,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":87.25,"SprintDuplicateQuestions":96.49,"TwitterSemEval2015":78.23,"TwitterURLCorpus":87.04} -{"level_0":20,"index":108,"Rank":21,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.25,"SprintDuplicateQuestions":97.24,"TwitterSemEval2015":78.17,"TwitterURLCorpus":86.33} -{"level_0":21,"index":111,"Rank":22,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.25,"SprintDuplicateQuestions":97.24,"TwitterSemEval2015":78.17,"TwitterURLCorpus":86.33} -{"level_0":22,"index":165,"Rank":23,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.25,"SprintDuplicateQuestions":97.24,"TwitterSemEval2015":78.17,"TwitterURLCorpus":86.33} -{"level_0":23,"index":194,"Rank":24,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.2,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":78.55,"TwitterURLCorpus":86.23} -{"level_0":24,"index":133,"Rank":25,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.2,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":78.55,"TwitterURLCorpus":86.23} -{"level_0":25,"index":53,"Rank":26,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.14,"SprintDuplicateQuestions":96.97,"TwitterSemEval2015":78.29,"TwitterURLCorpus":86.16} -{"level_0":26,"index":114,"Rank":27,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.12,"SprintDuplicateQuestions":96.73,"TwitterSemEval2015":79.04,"TwitterURLCorpus":85.6} -{"level_0":27,"index":22,"Rank":28,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":87.12,"SprintDuplicateQuestions":96.73,"TwitterSemEval2015":79.04,"TwitterURLCorpus":85.6} -{"level_0":28,"index":150,"Rank":29,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.12,"SprintDuplicateQuestions":96.73,"TwitterSemEval2015":79.04,"TwitterURLCorpus":85.6} -{"level_0":29,"index":197,"Rank":30,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.07,"SprintDuplicateQuestions":96.87,"TwitterSemEval2015":78.24,"TwitterURLCorpus":86.11} -{"level_0":30,"index":261,"Rank":31,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":87.07,"SprintDuplicateQuestions":96.87,"TwitterSemEval2015":78.24,"TwitterURLCorpus":86.11} -{"level_0":31,"index":215,"Rank":32,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":86.91,"SprintDuplicateQuestions":95.94,"TwitterSemEval2015":78.73,"TwitterURLCorpus":86.05} -{"level_0":32,"index":9,"Rank":33,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":86.87,"SprintDuplicateQuestions":98.07,"TwitterSemEval2015":74.44,"TwitterURLCorpus":88.11} -{"level_0":33,"index":170,"Rank":34,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.85,"SprintDuplicateQuestions":96.79,"TwitterSemEval2015":78.23,"TwitterURLCorpus":85.53} -{"level_0":34,"index":200,"Rank":35,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.74,"SprintDuplicateQuestions":96.49,"TwitterSemEval2015":77.8,"TwitterURLCorpus":85.94} -{"level_0":35,"index":117,"Rank":36,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.7,"SprintDuplicateQuestions":96.54,"TwitterSemEval2015":77.6,"TwitterURLCorpus":85.96} -{"level_0":36,"index":149,"Rank":37,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":86.62,"SprintDuplicateQuestions":94.94,"TwitterSemEval2015":77.99,"TwitterURLCorpus":86.93} -{"level_0":37,"index":8,"Rank":38,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.57,"SprintDuplicateQuestions":96.01,"TwitterSemEval2015":76.87,"TwitterURLCorpus":86.84} -{"level_0":38,"index":120,"Rank":39,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"level_0":39,"index":182,"Rank":40,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"level_0":40,"index":181,"Rank":41,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"level_0":41,"index":179,"Rank":42,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"level_0":42,"index":20,"Rank":43,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"level_0":43,"index":180,"Rank":44,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.55,"SprintDuplicateQuestions":96.33,"TwitterSemEval2015":77.68,"TwitterURLCorpus":85.65} -{"level_0":44,"index":151,"Rank":45,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":86.45,"SprintDuplicateQuestions":95.69,"TwitterSemEval2015":77.67,"TwitterURLCorpus":86.0} -{"level_0":45,"index":137,"Rank":46,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.45,"SprintDuplicateQuestions":95.69,"TwitterSemEval2015":77.67,"TwitterURLCorpus":86.0} -{"level_0":46,"index":115,"Rank":47,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.32,"SprintDuplicateQuestions":95.93,"TwitterSemEval2015":76.92,"TwitterURLCorpus":86.11} -{"level_0":47,"index":198,"Rank":48,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.24,"SprintDuplicateQuestions":96.38,"TwitterSemEval2015":76.41,"TwitterURLCorpus":85.93} -{"level_0":48,"index":66,"Rank":49,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":86.21,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":76.14,"TwitterURLCorpus":86.23} -{"level_0":49,"index":161,"Rank":50,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.19,"SprintDuplicateQuestions":91.18,"TwitterSemEval2015":80.27,"TwitterURLCorpus":87.12} -{"level_0":50,"index":36,"Rank":51,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.15,"SprintDuplicateQuestions":96.79,"TwitterSemEval2015":75.16,"TwitterURLCorpus":86.49} -{"level_0":51,"index":237,"Rank":52,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":86.13,"SprintDuplicateQuestions":95.45,"TwitterSemEval2015":77.81,"TwitterURLCorpus":85.14} -{"level_0":52,"index":238,"Rank":53,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":86.12,"SprintDuplicateQuestions":95.68,"TwitterSemEval2015":77.54,"TwitterURLCorpus":85.13} -{"level_0":53,"index":245,"Rank":54,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":86.06,"SprintDuplicateQuestions":91.44,"TwitterSemEval2015":80.89,"TwitterURLCorpus":85.86} -{"level_0":54,"index":155,"Rank":55,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":86.03,"SprintDuplicateQuestions":94.92,"TwitterSemEval2015":76.92,"TwitterURLCorpus":86.25} -{"level_0":55,"index":93,"Rank":56,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.03,"SprintDuplicateQuestions":94.92,"TwitterSemEval2015":76.92,"TwitterURLCorpus":86.25} -{"level_0":56,"index":154,"Rank":57,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":85.94,"SprintDuplicateQuestions":95.42,"TwitterSemEval2015":76.1,"TwitterURLCorpus":86.31} -{"level_0":57,"index":148,"Rank":58,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":85.89,"SprintDuplicateQuestions":93.07,"TwitterSemEval2015":77.42,"TwitterURLCorpus":87.18} -{"level_0":58,"index":193,"Rank":59,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.89,"SprintDuplicateQuestions":96.05,"TwitterSemEval2015":76.08,"TwitterURLCorpus":85.54} 
-{"level_0":59,"index":23,"Rank":60,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.84,"SprintDuplicateQuestions":90.94,"TwitterSemEval2015":79.64,"TwitterURLCorpus":86.95} -{"level_0":60,"index":34,"Rank":61,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.84,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":74.8,"TwitterURLCorpus":86.46} -{"level_0":61,"index":126,"Rank":62,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.79,"SprintDuplicateQuestions":92.82,"TwitterSemEval2015":77.96,"TwitterURLCorpus":86.59} -{"level_0":62,"index":205,"Rank":63,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.79,"SprintDuplicateQuestions":92.82,"TwitterSemEval2015":77.96,"TwitterURLCorpus":86.59} -{"level_0":63,"index":17,"Rank":64,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":85.79,"SprintDuplicateQuestions":92.82,"TwitterSemEval2015":77.96,"TwitterURLCorpus":86.59} -{"level_0":64,"index":153,"Rank":65,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.73,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":75.97,"TwitterURLCorpus":86.63} -{"level_0":65,"index":140,"Rank":66,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.73,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":75.97,"TwitterURLCorpus":86.63} -{"level_0":66,"index":283,"Rank":67,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.72,"SprintDuplicateQuestions":92.25,"TwitterSemEval2015":77.13,"TwitterURLCorpus":87.78} -{"level_0":67,"index":267,"Rank":68,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.41,"SprintDuplicateQuestions":95.02,"TwitterSemEval2015":75.24,"TwitterURLCorpus":85.96} -{"level_0":68,"index":169,"Rank":69,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.4,"SprintDuplicateQuestions":95.62,"TwitterSemEval2015":73.81,"TwitterURLCorpus":86.78} -{"level_0":69,"index":175,"Rank":70,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":85.38,"SprintDuplicateQuestions":95.3,"TwitterSemEval2015":74.74,"TwitterURLCorpus":86.09} -{"level_0":70,"index":135,"Rank":71,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.38,"SprintDuplicateQuestions":95.3,"TwitterSemEval2015":74.74,"TwitterURLCorpus":86.09} -{"level_0":71,"index":18,"Rank":72,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.33,"SprintDuplicateQuestions":94.94,"TwitterSemEval2015":75.53,"TwitterURLCorpus":85.51} -{"level_0":72,"index":236,"Rank":73,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":85.32,"SprintDuplicateQuestions":95.05,"TwitterSemEval2015":76.03,"TwitterURLCorpus":84.89} -{"level_0":73,"index":243,"Rank":74,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":85.18,"SprintDuplicateQuestions":91.23,"TwitterSemEval2015":78.25,"TwitterURLCorpus":86.05} -{"level_0":74,"index":129,"Rank":75,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"level_0":75,"index":206,"Rank":76,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"level_0":76,"index":28,"Rank":77,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"level_0":77,"index":29,"Rank":78,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"level_0":78,"index":26,"Rank":79,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"level_0":79,"index":27,"Rank":80,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.15,"SprintDuplicateQuestions":93.52,"TwitterSemEval2015":75.23,"TwitterURLCorpus":86.71} -{"level_0":80,"index":152,"Rank":81,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.09,"SprintDuplicateQuestions":94.88,"TwitterSemEval2015":74.4,"TwitterURLCorpus":85.98} -{"level_0":81,"index":157,"Rank":82,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":85.08,"SprintDuplicateQuestions":95.29,"TwitterSemEval2015":74.16,"TwitterURLCorpus":85.79} -{"level_0":82,"index":201,"Rank":83,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.06,"SprintDuplicateQuestions":95.0,"TwitterSemEval2015":74.5,"TwitterURLCorpus":85.69} -{"level_0":83,"index":246,"Rank":84,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":85.06,"SprintDuplicateQuestions":88.89,"TwitterSemEval2015":80.28,"TwitterURLCorpus":86.01} -{"level_0":84,"index":202,"Rank":85,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.05,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":73.26,"TwitterURLCorpus":85.64} -{"level_0":85,"index":33,"Rank":86,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.05,"SprintDuplicateQuestions":96.59,"TwitterSemEval2015":72.23,"TwitterURLCorpus":86.32} -{"level_0":86,"index":284,"Rank":87,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.04,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":73.33,"TwitterURLCorpus":87.21} -{"level_0":87,"index":253,"Rank":88,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.0,"SprintDuplicateQuestions":95.75,"TwitterSemEval2015":73.73,"TwitterURLCorpus":85.53} -{"level_0":88,"index":119,"Rank":89,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.99,"SprintDuplicateQuestions":96.14,"TwitterSemEval2015":73.48,"TwitterURLCorpus":85.36} -{"level_0":89,"index":125,"Rank":90,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.99,"SprintDuplicateQuestions":96.14,"TwitterSemEval2015":73.48,"TwitterURLCorpus":85.36} -{"level_0":90,"index":244,"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":84.97,"SprintDuplicateQuestions":89.01,"TwitterSemEval2015":79.75,"TwitterURLCorpus":86.14} -{"level_0":91,"index":43,"Rank":92,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.97,"SprintDuplicateQuestions":91.24,"TwitterSemEval2015":77.21,"TwitterURLCorpus":86.45} -{"level_0":92,"index":24,"Rank":93,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":84.92,"SprintDuplicateQuestions":96.67,"TwitterSemEval2015":73.24,"TwitterURLCorpus":84.84} -{"level_0":93,"index":281,"Rank":94,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.89,"SprintDuplicateQuestions":92.17,"TwitterSemEval2015":75.28,"TwitterURLCorpus":87.22} -{"level_0":94,"index":172,"Rank":95,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.8,"SprintDuplicateQuestions":96.09,"TwitterSemEval2015":72.32,"TwitterURLCorpus":85.98} -{"level_0":95,"index":160,"Rank":96,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":84.75,"SprintDuplicateQuestions":93.13,"TwitterSemEval2015":75.28,"TwitterURLCorpus":85.83} -{"level_0":96,"index":199,"Rank":97,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.69,"SprintDuplicateQuestions":95.5,"TwitterSemEval2015":72.92,"TwitterURLCorpus":85.66} -{"level_0":97,"index":268,"Rank":98,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.69,"SprintDuplicateQuestions":95.63,"TwitterSemEval2015":72.87,"TwitterURLCorpus":85.58} -{"level_0":98,"index":118,"Rank":99,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.68,"SprintDuplicateQuestions":95.79,"TwitterSemEval2015":72.95,"TwitterURLCorpus":85.3} -{"level_0":99,"index":158,"Rank":100,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.67,"SprintDuplicateQuestions":94.88,"TwitterSemEval2015":73.34,"TwitterURLCorpus":85.79} -{"level_0":100,"index":213,"Rank":101,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.61,"SprintDuplicateQuestions":92.9,"TwitterSemEval2015":74.27,"TwitterURLCorpus":86.65} -{"level_0":101,"index":211,"Rank":102,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":84.59,"SprintDuplicateQuestions":92.91,"TwitterSemEval2015":74.3,"TwitterURLCorpus":86.57} -{"level_0":102,"index":207,"Rank":103,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.59,"SprintDuplicateQuestions":94.83,"TwitterSemEval2015":72.55,"TwitterURLCorpus":86.38} 
-{"level_0":103,"index":252,"Rank":104,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.57,"SprintDuplicateQuestions":95.71,"TwitterSemEval2015":72.47,"TwitterURLCorpus":85.52} -{"level_0":104,"index":176,"Rank":105,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.56,"SprintDuplicateQuestions":95.62,"TwitterSemEval2015":71.62,"TwitterURLCorpus":86.44} -{"level_0":105,"index":19,"Rank":106,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.53,"SprintDuplicateQuestions":94.16,"TwitterSemEval2015":75.25,"TwitterURLCorpus":84.18} -{"level_0":106,"index":124,"Rank":107,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":84.48,"SprintDuplicateQuestions":97.32,"TwitterSemEval2015":70.29,"TwitterURLCorpus":85.83} -{"level_0":107,"index":177,"Rank":108,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.44,"SprintDuplicateQuestions":95.12,"TwitterSemEval2015":72.15,"TwitterURLCorpus":86.05} -{"level_0":108,"index":136,"Rank":109,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.44,"SprintDuplicateQuestions":95.12,"TwitterSemEval2015":72.15,"TwitterURLCorpus":86.05} -{"level_0":109,"index":282,"Rank":110,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.22,"SprintDuplicateQuestions":89.02,"TwitterSemEval2015":76.56,"TwitterURLCorpus":87.09} -{"level_0":110,"index":210,"Rank":111,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":84.09,"SprintDuplicateQuestions":92.31,"TwitterSemEval2015":73.61,"TwitterURLCorpus":86.34} -{"level_0":111,"index":167,"Rank":112,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.06,"SprintDuplicateQuestions":95.36,"TwitterSemEval2015":70.64,"TwitterURLCorpus":86.18} -{"level_0":112,"index":35,"Rank":113,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.95,"SprintDuplicateQuestions":96.6,"TwitterSemEval2015":69.41,"TwitterURLCorpus":85.85} -{"level_0":113,"index":235,"Rank":114,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.85,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":72.23,"TwitterURLCorpus":84.77} -{"level_0":114,"index":208,"Rank":115,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.67,"SprintDuplicateQuestions":95.93,"TwitterSemEval2015":69.68,"TwitterURLCorpus":85.41} -{"level_0":115,"index":174,"Rank":116,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.65,"SprintDuplicateQuestions":94.87,"TwitterSemEval2015":70.25,"TwitterURLCorpus":85.83} -{"level_0":116,"index":269,"Rank":117,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.58,"SprintDuplicateQuestions":94.67,"TwitterSemEval2015":70.34,"TwitterURLCorpus":85.72} -{"level_0":117,"index":159,"Rank":118,"Model":"multilingual-e5-base<\/a>","Model Size (Million 
Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":83.57,"SprintDuplicateQuestions":93.01,"TwitterSemEval2015":72.21,"TwitterURLCorpus":85.48} -{"level_0":118,"index":209,"Rank":119,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":83.54,"SprintDuplicateQuestions":91.45,"TwitterSemEval2015":73.23,"TwitterURLCorpus":85.93} -{"level_0":119,"index":254,"Rank":120,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.54,"SprintDuplicateQuestions":95.57,"TwitterSemEval2015":70.12,"TwitterURLCorpus":84.92} -{"level_0":120,"index":147,"Rank":121,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.51,"SprintDuplicateQuestions":90.35,"TwitterSemEval2015":73.99,"TwitterURLCorpus":86.2} -{"level_0":121,"index":116,"Rank":122,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.39,"SprintDuplicateQuestions":94.14,"TwitterSemEval2015":70.46,"TwitterURLCorpus":85.58} -{"level_0":122,"index":69,"Rank":123,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.32,"SprintDuplicateQuestions":95.12,"TwitterSemEval2015":69.78,"TwitterURLCorpus":85.07} -{"level_0":123,"index":166,"Rank":124,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.11,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":69.14,"TwitterURLCorpus":85.62} -{"level_0":124,"index":270,"Rank":125,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.09,"SprintDuplicateQuestions":95.49,"TwitterSemEval2015":68.61,"TwitterURLCorpus":85.16} -{"level_0":125,"index":171,"Rank":126,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.04,"SprintDuplicateQuestions":95.48,"TwitterSemEval2015":68.3,"TwitterURLCorpus":85.33} -{"level_0":126,"index":230,"Rank":127,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.04,"SprintDuplicateQuestions":90.15,"TwitterSemEval2015":73.85,"TwitterURLCorpus":85.11} -{"level_0":127,"index":42,"Rank":128,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":82.95,"SprintDuplicateQuestions":93.06,"TwitterSemEval2015":71.24,"TwitterURLCorpus":84.54} -{"level_0":128,"index":262,"Rank":129,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.83,"SprintDuplicateQuestions":93.02,"TwitterSemEval2015":71.07,"TwitterURLCorpus":84.4} -{"level_0":129,"index":107,"Rank":130,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.83,"SprintDuplicateQuestions":95.48,"TwitterSemEval2015":68.17,"TwitterURLCorpus":84.84} -{"level_0":130,"index":106,"Rank":131,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.81,"SprintDuplicateQuestions":95.78,"TwitterSemEval2015":67.76,"TwitterURLCorpus":84.89} -{"level_0":131,"index":68,"Rank":132,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.69,"SprintDuplicateQuestions":95.35,"TwitterSemEval2015":67.97,"TwitterURLCorpus":84.75} -{"level_0":132,"index":76,"Rank":133,"Model":"gte-micro-v4<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.59,"SprintDuplicateQuestions":95.54,"TwitterSemEval2015":67.55,"TwitterURLCorpus":84.68} -{"level_0":133,"index":162,"Rank":134,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":82.59,"SprintDuplicateQuestions":92.18,"TwitterSemEval2015":70.75,"TwitterURLCorpus":84.83} -{"level_0":134,"index":214,"Rank":135,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":82.54,"SprintDuplicateQuestions":95.55,"TwitterSemEval2015":66.85,"TwitterURLCorpus":85.21} -{"level_0":135,"index":228,"Rank":136,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":82.41,"SprintDuplicateQuestions":92.45,"TwitterSemEval2015":70.02,"TwitterURLCorpus":84.77} -{"level_0":136,"index":229,"Rank":137,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":82.37,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":67.86,"TwitterURLCorpus":84.7} -{"level_0":137,"index":192,"Rank":138,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.37,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":67.86,"TwitterURLCorpus":84.7} -{"level_0":138,"index":212,"Rank":139,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":82.26,"SprintDuplicateQuestions":90.06,"TwitterSemEval2015":71.68,"TwitterURLCorpus":85.03} -{"level_0":139,"index":112,"Rank":140,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.14,"SprintDuplicateQuestions":92.92,"TwitterSemEval2015":67.8,"TwitterURLCorpus":85.71} -{"level_0":140,"index":128,"Rank":141,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.11,"SprintDuplicateQuestions":93.23,"TwitterSemEval2015":69.6,"TwitterURLCorpus":83.49} -{"level_0":141,"index":103,"Rank":142,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.07,"SprintDuplicateQuestions":95.28,"TwitterSemEval2015":65.78,"TwitterURLCorpus":85.15} -{"level_0":142,"index":101,"Rank":143,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.02,"SprintDuplicateQuestions":94.67,"TwitterSemEval2015":66.17,"TwitterURLCorpus":85.21} -{"level_0":143,"index":83,"Rank":144,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.0,"SprintDuplicateQuestions":93.84,"TwitterSemEval2015":66.87,"TwitterURLCorpus":85.29} -{"level_0":144,"index":203,"Rank":145,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.93,"SprintDuplicateQuestions":94.59,"TwitterSemEval2015":67.01,"TwitterURLCorpus":84.2} -{"level_0":145,"index":123,"Rank":146,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.9,"SprintDuplicateQuestions":94.93,"TwitterSemEval2015":65.31,"TwitterURLCorpus":85.46} -{"level_0":146,"index":105,"Rank":147,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":81.77,"SprintDuplicateQuestions":95.3,"TwitterSemEval2015":65.79,"TwitterURLCorpus":84.22} -{"level_0":147,"index":239,"Rank":148,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.74,"SprintDuplicateQuestions":96.09,"TwitterSemEval2015":65.95,"TwitterURLCorpus":83.17} -{"level_0":148,"index":113,"Rank":149,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.4,"SprintDuplicateQuestions":95.15,"TwitterSemEval2015":65.79,"TwitterURLCorpus":83.27} -{"level_0":149,"index":104,"Rank":150,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.33,"SprintDuplicateQuestions":94.96,"TwitterSemEval2015":64.32,"TwitterURLCorpus":84.7} -{"level_0":150,"index":285,"Rank":151,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.32,"SprintDuplicateQuestions":94.53,"TwitterSemEval2015":64.41,"TwitterURLCorpus":85.01} -{"level_0":151,"index":100,"Rank":152,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.07,"SprintDuplicateQuestions":95.21,"TwitterSemEval2015":63.28,"TwitterURLCorpus":84.72} -{"level_0":152,"index":168,"Rank":153,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.05,"SprintDuplicateQuestions":92.09,"TwitterSemEval2015":65.96,"TwitterURLCorpus":85.11} -{"level_0":153,"index":132,"Rank":154,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.03,"SprintDuplicateQuestions":90.68,"TwitterSemEval2015":67.77,"TwitterURLCorpus":84.65} -{"level_0":154,"index":65,"Rank":155,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.94,"SprintDuplicateQuestions":91.3,"TwitterSemEval2015":68.76,"TwitterURLCorpus":82.76} -{"level_0":155,"index":134,"Rank":156,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":80.86,"SprintDuplicateQuestions":93.84,"TwitterSemEval2015":64.72,"TwitterURLCorpus":84.01} -{"level_0":156,"index":242,"Rank":157,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.81,"SprintDuplicateQuestions":90.55,"TwitterSemEval2015":66.75,"TwitterURLCorpus":85.14} -{"level_0":157,"index":121,"Rank":158,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.8,"SprintDuplicateQuestions":95.2,"TwitterSemEval2015":62.35,"TwitterURLCorpus":84.84} -{"level_0":158,"index":99,"Rank":159,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.8,"SprintDuplicateQuestions":95.2,"TwitterSemEval2015":62.35,"TwitterURLCorpus":84.84} -{"level_0":159,"index":73,"Rank":160,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.73,"SprintDuplicateQuestions":94.26,"TwitterSemEval2015":63.85,"TwitterURLCorpus":84.07} -{"level_0":160,"index":82,"Rank":161,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":80.65,"SprintDuplicateQuestions":93.47,"TwitterSemEval2015":63.68,"TwitterURLCorpus":84.8} -{"level_0":161,"index":72,"Rank":162,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.62,"SprintDuplicateQuestions":89.85,"TwitterSemEval2015":67.48,"TwitterURLCorpus":84.53} -{"level_0":162,"index":71,"Rank":163,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.57,"SprintDuplicateQuestions":89.26,"TwitterSemEval2015":69.26,"TwitterURLCorpus":83.19} -{"level_0":163,"index":77,"Rank":164,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.4,"SprintDuplicateQuestions":94.4,"TwitterSemEval2015":63.86,"TwitterURLCorpus":82.95} -{"level_0":164,"index":44,"Rank":165,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.4,"SprintDuplicateQuestions":90.41,"TwitterSemEval2015":67.67,"TwitterURLCorpus":83.11} -{"level_0":165,"index":98,"Rank":166,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.84,"SprintDuplicateQuestions":92.98,"TwitterSemEval2015":62.44,"TwitterURLCorpus":84.1} -{"level_0":166,"index":78,"Rank":167,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.7,"SprintDuplicateQuestions":92.14,"TwitterSemEval2015":63.44,"TwitterURLCorpus":83.53} -{"level_0":167,"index":260,"Rank":168,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.62,"SprintDuplicateQuestions":90.89,"TwitterSemEval2015":63.76,"TwitterURLCorpus":84.2} -{"level_0":168,"index":79,"Rank":169,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.58,"SprintDuplicateQuestions":92.58,"TwitterSemEval2015":62.37,"TwitterURLCorpus":83.79} -{"level_0":169,"index":258,"Rank":170,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.53,"SprintDuplicateQuestions":84.37,"TwitterSemEval2015":70.13,"TwitterURLCorpus":84.09} -{"level_0":170,"index":185,"Rank":171,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.23,"SprintDuplicateQuestions":86.13,"TwitterSemEval2015":68.03,"TwitterURLCorpus":83.52} -{"level_0":171,"index":173,"Rank":172,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.22,"SprintDuplicateQuestions":94.93,"TwitterSemEval2015":59.3,"TwitterURLCorpus":83.44} -{"level_0":172,"index":227,"Rank":173,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":78.87,"SprintDuplicateQuestions":89.26,"TwitterSemEval2015":62.78,"TwitterURLCorpus":84.58} -{"level_0":173,"index":70,"Rank":174,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.57,"SprintDuplicateQuestions":85.22,"TwitterSemEval2015":67.56,"TwitterURLCorpus":82.94} -{"level_0":174,"index":241,"Rank":175,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":78.45,"SprintDuplicateQuestions":89.46,"TwitterSemEval2015":62.06,"TwitterURLCorpus":83.83} 
-{"level_0":175,"index":63,"Rank":176,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":78.01,"SprintDuplicateQuestions":88.14,"TwitterSemEval2015":66.6,"TwitterURLCorpus":79.3} -{"level_0":176,"index":61,"Rank":177,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":77.88,"SprintDuplicateQuestions":87.57,"TwitterSemEval2015":65.14,"TwitterURLCorpus":80.94} -{"level_0":177,"index":184,"Rank":178,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.83,"SprintDuplicateQuestions":88.21,"TwitterSemEval2015":64.8,"TwitterURLCorpus":80.49} -{"level_0":178,"index":279,"Rank":179,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.79,"SprintDuplicateQuestions":79.85,"TwitterSemEval2015":69.45,"TwitterURLCorpus":84.06} -{"level_0":179,"index":251,"Rank":180,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.75,"SprintDuplicateQuestions":82.81,"TwitterSemEval2015":66.16,"TwitterURLCorpus":84.28} -{"level_0":180,"index":234,"Rank":181,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":77.61,"SprintDuplicateQuestions":87.15,"TwitterSemEval2015":61.67,"TwitterURLCorpus":84.02} -{"level_0":181,"index":278,"Rank":182,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.46,"SprintDuplicateQuestions":76.46,"TwitterSemEval2015":70.85,"TwitterURLCorpus":85.08} -{"level_0":182,"index":84,"Rank":183,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.03,"SprintDuplicateQuestions":80.54,"TwitterSemEval2015":66.0,"TwitterURLCorpus":84.54} -{"level_0":183,"index":30,"Rank":184,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.96,"SprintDuplicateQuestions":90.71,"TwitterSemEval2015":58.07,"TwitterURLCorpus":82.09} -{"level_0":184,"index":183,"Rank":185,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.9,"SprintDuplicateQuestions":87.07,"TwitterSemEval2015":62.51,"TwitterURLCorpus":81.11} -{"level_0":185,"index":277,"Rank":186,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.86,"SprintDuplicateQuestions":77.85,"TwitterSemEval2015":69.04,"TwitterURLCorpus":83.69} -{"level_0":186,"index":248,"Rank":187,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.49,"SprintDuplicateQuestions":83.53,"TwitterSemEval2015":62.49,"TwitterURLCorpus":83.46} -{"level_0":187,"index":257,"Rank":188,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.23,"SprintDuplicateQuestions":86.37,"TwitterSemEval2015":60.64,"TwitterURLCorpus":81.68} -{"level_0":188,"index":280,"Rank":189,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.9,"SprintDuplicateQuestions":69.52,"TwitterSemEval2015":74.42,"TwitterURLCorpus":83.75} 
-{"level_0":189,"index":259,"Rank":190,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.43,"SprintDuplicateQuestions":87.05,"TwitterSemEval2015":57.03,"TwitterURLCorpus":82.21} -{"level_0":190,"index":80,"Rank":191,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.23,"SprintDuplicateQuestions":89.89,"TwitterSemEval2015":54.75,"TwitterURLCorpus":81.06} -{"level_0":191,"index":59,"Rank":192,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.53,"SprintDuplicateQuestions":77.36,"TwitterSemEval2015":63.58,"TwitterURLCorpus":82.64} -{"level_0":192,"index":217,"Rank":193,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":73.68,"SprintDuplicateQuestions":69.39,"TwitterSemEval2015":67.75,"TwitterURLCorpus":83.89} -{"level_0":193,"index":233,"Rank":194,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":72.94,"SprintDuplicateQuestions":85.55,"TwitterSemEval2015":53.85,"TwitterURLCorpus":79.41} -{"level_0":194,"index":67,"Rank":195,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":72.21,"SprintDuplicateQuestions":77.36,"TwitterSemEval2015":61.54,"TwitterURLCorpus":77.73} -{"level_0":195,"index":256,"Rank":196,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.88,"SprintDuplicateQuestions":75.53,"TwitterSemEval2015":58.77,"TwitterURLCorpus":81.33} -{"level_0":196,"index":81,"Rank":197,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.78,"SprintDuplicateQuestions":77.73,"TwitterSemEval2015":57.09,"TwitterURLCorpus":80.51} -{"level_0":197,"index":141,"Rank":198,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.4,"SprintDuplicateQuestions":77.08,"TwitterSemEval2015":53.58,"TwitterURLCorpus":83.53} -{"level_0":198,"index":232,"Rank":199,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":70.92,"SprintDuplicateQuestions":86.96,"TwitterSemEval2015":48.45,"TwitterURLCorpus":77.35} -{"level_0":199,"index":218,"Rank":200,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":70.33,"SprintDuplicateQuestions":69.41,"TwitterSemEval2015":60.21,"TwitterURLCorpus":81.37} -{"level_0":200,"index":11,"Rank":201,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":68.86,"SprintDuplicateQuestions":65.54,"TwitterSemEval2015":59.57,"TwitterURLCorpus":81.47} -{"level_0":201,"index":231,"Rank":202,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.37,"SprintDuplicateQuestions":71.63,"TwitterSemEval2015":43.25,"TwitterURLCorpus":69.22} -{"level_0":202,"index":122,"Rank":203,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.33,"SprintDuplicateQuestions":36.81,"TwitterSemEval2015":55.9,"TwitterURLCorpus":76.29} -{"level_0":203,"index":255,"Rank":204,"Model":"ALL_862873<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.61,"SprintDuplicateQuestions":17.4,"TwitterSemEval2015":30.38,"TwitterURLCorpus":44.04} -{"level_0":204,"index":263,"Rank":205,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":0.79,"SprintDuplicateQuestions":0.85,"TwitterSemEval2015":0.67,"TwitterURLCorpus":0.84} -{"level_0":205,"index":97,"Rank":245,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","SprintDuplicateQuestions":57.56,"TwitterSemEval2015":"","TwitterURLCorpus":""} +{"Rank":1,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.24,"SprintDuplicateQuestions":94.5,"TwitterSemEval2015":86.32,"TwitterURLCorpus":86.9} +{"Rank":2,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":88.03,"SprintDuplicateQuestions":96.83,"TwitterSemEval2015":80.7,"TwitterURLCorpus":86.56} +{"Rank":3,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":87.99,"SprintDuplicateQuestions":96.82,"TwitterSemEval2015":80.6,"TwitterURLCorpus":86.56} +{"Rank":4,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":87.79,"SprintDuplicateQuestions":95.09,"TwitterSemEval2015":81.73,"TwitterURLCorpus":86.56} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":87.61,"SprintDuplicateQuestions":96.26,"TwitterSemEval2015":79.04,"TwitterURLCorpus":87.53} +{"Rank":6,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":87.38,"SprintDuplicateQuestions":95.99,"TwitterSemEval2015":79.36,"TwitterURLCorpus":86.79} +{"Rank":7,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":87.25,"SprintDuplicateQuestions":96.49,"TwitterSemEval2015":78.23,"TwitterURLCorpus":87.04} +{"Rank":8,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":86.87,"SprintDuplicateQuestions":98.07,"TwitterSemEval2015":74.44,"TwitterURLCorpus":88.11} +{"Rank":9,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.57,"SprintDuplicateQuestions":96.01,"TwitterSemEval2015":76.87,"TwitterURLCorpus":86.84} +{"Rank":10,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":86.21,"SprintDuplicateQuestions":96.25,"TwitterSemEval2015":76.14,"TwitterURLCorpus":86.23} +{"Rank":11,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":86.13,"SprintDuplicateQuestions":95.45,"TwitterSemEval2015":77.81,"TwitterURLCorpus":85.14} +{"Rank":12,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":86.12,"SprintDuplicateQuestions":95.68,"TwitterSemEval2015":77.54,"TwitterURLCorpus":85.13} +{"Rank":13,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":86.06,"SprintDuplicateQuestions":91.44,"TwitterSemEval2015":80.89,"TwitterURLCorpus":85.86} 
+{"Rank":14,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.72,"SprintDuplicateQuestions":92.25,"TwitterSemEval2015":77.13,"TwitterURLCorpus":87.78} +{"Rank":15,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":85.32,"SprintDuplicateQuestions":95.05,"TwitterSemEval2015":76.03,"TwitterURLCorpus":84.89} +{"Rank":16,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":85.18,"SprintDuplicateQuestions":91.23,"TwitterSemEval2015":78.25,"TwitterURLCorpus":86.05} +{"Rank":17,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":85.06,"SprintDuplicateQuestions":88.89,"TwitterSemEval2015":80.28,"TwitterURLCorpus":86.01} +{"Rank":18,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.04,"SprintDuplicateQuestions":94.58,"TwitterSemEval2015":73.33,"TwitterURLCorpus":87.21} +{"Rank":19,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":84.97,"SprintDuplicateQuestions":89.01,"TwitterSemEval2015":79.75,"TwitterURLCorpus":86.14} +{"Rank":20,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.89,"SprintDuplicateQuestions":92.17,"TwitterSemEval2015":75.28,"TwitterURLCorpus":87.22} +{"Rank":21,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":84.75,"SprintDuplicateQuestions":93.14,"TwitterSemEval2015":75.28,"TwitterURLCorpus":85.83} +{"Rank":22,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":84.59,"SprintDuplicateQuestions":92.91,"TwitterSemEval2015":74.3,"TwitterURLCorpus":86.57} +{"Rank":23,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.22,"SprintDuplicateQuestions":89.02,"TwitterSemEval2015":76.56,"TwitterURLCorpus":87.09} +{"Rank":24,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":84.09,"SprintDuplicateQuestions":92.31,"TwitterSemEval2015":73.61,"TwitterURLCorpus":86.34} +{"Rank":25,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.85,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":72.23,"TwitterURLCorpus":84.77} +{"Rank":26,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":83.57,"SprintDuplicateQuestions":93.02,"TwitterSemEval2015":72.21,"TwitterURLCorpus":85.48} +{"Rank":27,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":83.54,"SprintDuplicateQuestions":91.45,"TwitterSemEval2015":73.23,"TwitterURLCorpus":85.93} +{"Rank":28,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":83.04,"SprintDuplicateQuestions":90.15,"TwitterSemEval2015":73.85,"TwitterURLCorpus":85.11} +{"Rank":29,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":82.95,"SprintDuplicateQuestions":93.06,"TwitterSemEval2015":71.24,"TwitterURLCorpus":84.54} +{"Rank":30,"Model":"multilingual-e5-small<\/a>","Model Size (Million 
Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":82.65,"SprintDuplicateQuestions":92.18,"TwitterSemEval2015":70.75,"TwitterURLCorpus":85.03} +{"Rank":31,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":82.54,"SprintDuplicateQuestions":95.55,"TwitterSemEval2015":66.85,"TwitterURLCorpus":85.21} +{"Rank":32,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":82.41,"SprintDuplicateQuestions":92.45,"TwitterSemEval2015":70.02,"TwitterURLCorpus":84.77} +{"Rank":33,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":82.37,"SprintDuplicateQuestions":94.55,"TwitterSemEval2015":67.86,"TwitterURLCorpus":84.7} +{"Rank":34,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":82.26,"SprintDuplicateQuestions":90.06,"TwitterSemEval2015":71.68,"TwitterURLCorpus":85.03} +{"Rank":35,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.74,"SprintDuplicateQuestions":96.09,"TwitterSemEval2015":65.95,"TwitterURLCorpus":83.17} +{"Rank":36,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.32,"SprintDuplicateQuestions":94.53,"TwitterSemEval2015":64.41,"TwitterURLCorpus":85.01} +{"Rank":37,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.94,"SprintDuplicateQuestions":91.3,"TwitterSemEval2015":68.76,"TwitterURLCorpus":82.76} +{"Rank":38,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.81,"SprintDuplicateQuestions":90.55,"TwitterSemEval2015":66.75,"TwitterURLCorpus":85.14} +{"Rank":39,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":78.87,"SprintDuplicateQuestions":89.26,"TwitterSemEval2015":62.78,"TwitterURLCorpus":84.58} +{"Rank":40,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":78.45,"SprintDuplicateQuestions":89.46,"TwitterSemEval2015":62.06,"TwitterURLCorpus":83.83} +{"Rank":41,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":78.01,"SprintDuplicateQuestions":88.14,"TwitterSemEval2015":66.6,"TwitterURLCorpus":79.3} +{"Rank":42,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":77.88,"SprintDuplicateQuestions":87.57,"TwitterSemEval2015":65.14,"TwitterURLCorpus":80.94} +{"Rank":43,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.79,"SprintDuplicateQuestions":79.85,"TwitterSemEval2015":69.45,"TwitterURLCorpus":84.06} +{"Rank":44,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":77.61,"SprintDuplicateQuestions":87.15,"TwitterSemEval2015":61.67,"TwitterURLCorpus":84.02} +{"Rank":45,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.46,"SprintDuplicateQuestions":76.46,"TwitterSemEval2015":70.85,"TwitterURLCorpus":85.08} +{"Rank":46,"Model":"text-similarity-ada-001<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.86,"SprintDuplicateQuestions":77.85,"TwitterSemEval2015":69.04,"TwitterURLCorpus":83.69} +{"Rank":47,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.9,"SprintDuplicateQuestions":69.52,"TwitterSemEval2015":74.42,"TwitterURLCorpus":83.75} +{"Rank":48,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":73.68,"SprintDuplicateQuestions":69.39,"TwitterSemEval2015":67.75,"TwitterURLCorpus":83.89} +{"Rank":49,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":72.94,"SprintDuplicateQuestions":85.55,"TwitterSemEval2015":53.85,"TwitterURLCorpus":79.41} +{"Rank":50,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":72.21,"SprintDuplicateQuestions":77.36,"TwitterSemEval2015":61.54,"TwitterURLCorpus":77.73} +{"Rank":51,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":70.92,"SprintDuplicateQuestions":86.96,"TwitterSemEval2015":48.45,"TwitterURLCorpus":77.35} +{"Rank":52,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":70.33,"SprintDuplicateQuestions":69.41,"TwitterSemEval2015":60.21,"TwitterURLCorpus":81.37} +{"Rank":53,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":68.86,"SprintDuplicateQuestions":65.54,"TwitterSemEval2015":59.57,"TwitterURLCorpus":81.47} +{"Rank":54,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.37,"SprintDuplicateQuestions":71.63,"TwitterSemEval2015":43.25,"TwitterURLCorpus":69.22} +{"Rank":55,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.33,"SprintDuplicateQuestions":36.81,"TwitterSemEval2015":55.9,"TwitterURLCorpus":76.29} +{"Rank":56,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":57,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":58,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":59,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":60,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":61,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":62,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":63,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":64,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":65,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":66,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":67,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":68,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":69,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":70,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":71,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":72,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":73,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":74,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":75,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":76,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":77,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":78,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, 
fp32)":0.11,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":79,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":80,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":81,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":82,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":83,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":84,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":85,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":86,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":87,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":88,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":89,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":90,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":91,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":92,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":93,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":94,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} 
+{"Rank":95,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":96,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":97,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":98,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":99,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":100,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":101,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":102,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":103,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":104,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":105,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":106,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":107,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":108,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":109,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":110,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} +{"Rank":111,"Model":"text-search-davinci-001<\/a>","Model 
Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SprintDuplicateQuestions":null,"TwitterSemEval2015":null,"TwitterURLCorpus":null} diff --git a/boards_data/en/data_tasks/Reranking/default.jsonl b/boards_data/en/data_tasks/Reranking/default.jsonl index f4484483b5250d193dcf10d398daea747ea18630..2ef606fdde7aa475db97e9c7154075fb31a9f313 100644 --- a/boards_data/en/data_tasks/Reranking/default.jsonl +++ b/boards_data/en/data_tasks/Reranking/default.jsonl @@ -1,199 +1,111 @@ -{"level_0":0,"index":51,"Rank":1,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.49,"AskUbuntuDupQuestions":68.45,"MindSmallReranking":31.99,"SciDocsRR":89.26,"StackOverflowDupQuestions":56.26} -{"level_0":1,"index":17,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":61.42,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":33.36,"SciDocsRR":89.09,"StackOverflowDupQuestions":55.66} -{"level_0":2,"index":126,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.42,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":33.36,"SciDocsRR":89.09,"StackOverflowDupQuestions":55.66} -{"level_0":3,"index":205,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.42,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":33.36,"SciDocsRR":89.09,"StackOverflowDupQuestions":55.66} -{"level_0":4,"index":138,"Rank":5,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.21,"AskUbuntuDupQuestions":67.33,"MindSmallReranking":33.05,"SciDocsRR":89.2,"StackOverflowDupQuestions":55.25} -{"level_0":5,"index":96,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.64,"AskUbuntuDupQuestions":67.58,"MindSmallReranking":32.72,"SciDocsRR":86.58,"StackOverflowDupQuestions":55.68} -{"level_0":6,"index":215,"Rank":7,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":60.54,"AskUbuntuDupQuestions":67.5,"MindSmallReranking":30.82,"SciDocsRR":87.26,"StackOverflowDupQuestions":56.58} -{"level_0":7,"index":219,"Rank":8,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.46,"AskUbuntuDupQuestions":66.73,"MindSmallReranking":32.51,"SciDocsRR":87.03,"StackOverflowDupQuestions":55.55} -{"level_0":8,"index":58,"Rank":9,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.29,"AskUbuntuDupQuestions":66.82,"MindSmallReranking":32.06,"SciDocsRR":86.4,"StackOverflowDupQuestions":55.89} -{"level_0":9,"index":156,"Rank":10,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.21,"AskUbuntuDupQuestions":66.98,"MindSmallReranking":32.6,"SciDocsRR":86.33,"StackOverflowDupQuestions":54.91} -{"level_0":10,"index":139,"Rank":11,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.16,"AskUbuntuDupQuestions":66.15,"MindSmallReranking":33.05,"SciDocsRR":88.44,"StackOverflowDupQuestions":52.99} -{"level_0":11,"index":95,"Rank":12,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":60.14,"AskUbuntuDupQuestions":66.71,"MindSmallReranking":31.26,"SciDocsRR":87.29,"StackOverflowDupQuestions":55.32} -{"level_0":12,"index":15,"Rank":13,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":60.13,"AskUbuntuDupQuestions":66.0,"MindSmallReranking":32.71,"SciDocsRR":87.89,"StackOverflowDupQuestions":53.93} -{"level_0":13,"index":194,"Rank":14,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.11,"AskUbuntuDupQuestions":65.16,"MindSmallReranking":32.54,"SciDocsRR":87.53,"StackOverflowDupQuestions":55.22} -{"level_0":14,"index":133,"Rank":15,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.11,"AskUbuntuDupQuestions":65.16,"MindSmallReranking":32.54,"SciDocsRR":87.53,"StackOverflowDupQuestions":55.22} -{"level_0":15,"index":6,"Rank":16,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.09,"AskUbuntuDupQuestions":64.92,"MindSmallReranking":30.97,"SciDocsRR":89.34,"StackOverflowDupQuestions":55.11} -{"level_0":16,"index":117,"Rank":17,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.05,"AskUbuntuDupQuestions":64.7,"MindSmallReranking":32.82,"SciDocsRR":87.8,"StackOverflowDupQuestions":54.88} -{"level_0":17,"index":186,"Rank":18,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.04,"AskUbuntuDupQuestions":64.46,"MindSmallReranking":32.27,"SciDocsRR":87.56,"StackOverflowDupQuestions":55.85} -{"level_0":18,"index":114,"Rank":19,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.03,"AskUbuntuDupQuestions":64.47,"MindSmallReranking":32.06,"SciDocsRR":87.63,"StackOverflowDupQuestions":55.95} -{"level_0":19,"index":150,"Rank":20,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.03,"AskUbuntuDupQuestions":64.47,"MindSmallReranking":32.06,"SciDocsRR":87.63,"StackOverflowDupQuestions":55.95} -{"level_0":20,"index":22,"Rank":21,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":60.03,"AskUbuntuDupQuestions":64.47,"MindSmallReranking":32.06,"SciDocsRR":87.63,"StackOverflowDupQuestions":55.95} -{"level_0":21,"index":197,"Rank":22,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.0,"AskUbuntuDupQuestions":64.42,"MindSmallReranking":32.78,"SciDocsRR":87.61,"StackOverflowDupQuestions":55.19} -{"level_0":22,"index":261,"Rank":23,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.0,"AskUbuntuDupQuestions":64.42,"MindSmallReranking":32.78,"SciDocsRR":87.61,"StackOverflowDupQuestions":55.19} -{"level_0":23,"index":204,"Rank":24,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.98,"AskUbuntuDupQuestions":64.55,"MindSmallReranking":33.94,"SciDocsRR":86.52,"StackOverflowDupQuestions":54.91} -{"level_0":24,"index":16,"Rank":25,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":59.98,"AskUbuntuDupQuestions":64.55,"MindSmallReranking":33.94,"SciDocsRR":86.52,"StackOverflowDupQuestions":54.91} -{"level_0":25,"index":53,"Rank":26,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.97,"AskUbuntuDupQuestions":64.29,"MindSmallReranking":32.66,"SciDocsRR":87.65,"StackOverflowDupQuestions":55.28} -{"level_0":26,"index":111,"Rank":27,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.88,"AskUbuntuDupQuestions":64.2,"MindSmallReranking":32.51,"SciDocsRR":87.49,"StackOverflowDupQuestions":55.32} -{"level_0":27,"index":108,"Rank":28,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.88,"AskUbuntuDupQuestions":64.2,"MindSmallReranking":32.51,"SciDocsRR":87.49,"StackOverflowDupQuestions":55.32} -{"level_0":28,"index":165,"Rank":29,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.88,"AskUbuntuDupQuestions":64.2,"MindSmallReranking":32.51,"SciDocsRR":87.49,"StackOverflowDupQuestions":55.32} -{"level_0":29,"index":170,"Rank":30,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.86,"AskUbuntuDupQuestions":64.32,"MindSmallReranking":32.27,"SciDocsRR":87.47,"StackOverflowDupQuestions":55.4} -{"level_0":30,"index":21,"Rank":31,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.86,"AskUbuntuDupQuestions":65.15,"MindSmallReranking":30.6,"SciDocsRR":86.96,"StackOverflowDupQuestions":56.71} -{"level_0":31,"index":43,"Rank":32,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.8,"AskUbuntuDupQuestions":65.6,"MindSmallReranking":32.84,"SciDocsRR":86.43,"StackOverflowDupQuestions":54.33} -{"level_0":32,"index":8,"Rank":33,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.74,"AskUbuntuDupQuestions":65.77,"MindSmallReranking":31.69,"SciDocsRR":87.03,"StackOverflowDupQuestions":54.49} -{"level_0":33,"index":23,"Rank":34,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.72,"AskUbuntuDupQuestions":64.59,"MindSmallReranking":31.79,"SciDocsRR":87.6,"StackOverflowDupQuestions":54.9} -{"level_0":34,"index":62,"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":59.68,"AskUbuntuDupQuestions":65.19,"MindSmallReranking":32.67,"SciDocsRR":86.05,"StackOverflowDupQuestions":54.82} -{"level_0":35,"index":115,"Rank":36,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.37,"AskUbuntuDupQuestions":62.69,"MindSmallReranking":32.36,"SciDocsRR":87.72,"StackOverflowDupQuestions":54.72} -{"level_0":36,"index":230,"Rank":37,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":59.36,"AskUbuntuDupQuestions":65.85,"MindSmallReranking":30.97,"SciDocsRR":88.65,"StackOverflowDupQuestions":51.98} -{"level_0":37,"index":283,"Rank":38,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.16,"AskUbuntuDupQuestions":65.03,"MindSmallReranking":29.86,"SciDocsRR":86.66,"StackOverflowDupQuestions":55.08} 
-{"level_0":38,"index":253,"Rank":39,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.13,"AskUbuntuDupQuestions":63.06,"MindSmallReranking":32.63,"SciDocsRR":87.2,"StackOverflowDupQuestions":53.63} -{"level_0":39,"index":198,"Rank":40,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.94,"AskUbuntuDupQuestions":62.39,"MindSmallReranking":31.89,"SciDocsRR":87.05,"StackOverflowDupQuestions":54.45} -{"level_0":40,"index":193,"Rank":41,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.94,"AskUbuntuDupQuestions":63.77,"MindSmallReranking":31.85,"SciDocsRR":86.98,"StackOverflowDupQuestions":53.16} -{"level_0":41,"index":1,"Rank":42,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":58.9,"AskUbuntuDupQuestions":64.4,"MindSmallReranking":33.07,"SciDocsRR":83.59,"StackOverflowDupQuestions":54.56} -{"level_0":42,"index":120,"Rank":43,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"level_0":43,"index":20,"Rank":44,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"level_0":44,"index":179,"Rank":45,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"level_0":45,"index":181,"Rank":46,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"level_0":46,"index":182,"Rank":47,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"level_0":47,"index":180,"Rank":48,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.2,"SciDocsRR":87.49,"StackOverflowDupQuestions":54.61} -{"level_0":48,"index":151,"Rank":49,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.78,"AskUbuntuDupQuestions":62.72,"MindSmallReranking":31.91,"SciDocsRR":86.66,"StackOverflowDupQuestions":53.81} -{"level_0":49,"index":137,"Rank":50,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.78,"AskUbuntuDupQuestions":62.72,"MindSmallReranking":31.91,"SciDocsRR":86.66,"StackOverflowDupQuestions":53.81} -{"level_0":50,"index":252,"Rank":51,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.61,"AskUbuntuDupQuestions":61.8,"MindSmallReranking":32.54,"SciDocsRR":87.08,"StackOverflowDupQuestions":53.01} 
-{"level_0":51,"index":161,"Rank":52,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.58,"AskUbuntuDupQuestions":63.89,"MindSmallReranking":33.09,"SciDocsRR":85.87,"StackOverflowDupQuestions":51.45} -{"level_0":52,"index":118,"Rank":53,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.56,"AskUbuntuDupQuestions":62.33,"MindSmallReranking":32.36,"SciDocsRR":86.48,"StackOverflowDupQuestions":53.06} -{"level_0":53,"index":19,"Rank":54,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.5,"AskUbuntuDupQuestions":63.13,"MindSmallReranking":31.46,"SciDocsRR":86.93,"StackOverflowDupQuestions":52.48} -{"level_0":54,"index":228,"Rank":55,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":58.44,"AskUbuntuDupQuestions":64.06,"MindSmallReranking":31.02,"SciDocsRR":87.2,"StackOverflowDupQuestions":51.47} -{"level_0":55,"index":64,"Rank":56,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":58.42,"AskUbuntuDupQuestions":63.98,"MindSmallReranking":31.5,"SciDocsRR":83.8,"StackOverflowDupQuestions":54.41} -{"level_0":56,"index":24,"Rank":57,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":58.36,"AskUbuntuDupQuestions":62.59,"MindSmallReranking":31.29,"SciDocsRR":85.94,"StackOverflowDupQuestions":53.64} -{"level_0":57,"index":125,"Rank":58,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.3,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.92,"SciDocsRR":86.18,"StackOverflowDupQuestions":52.96} -{"level_0":58,"index":119,"Rank":59,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.3,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":31.92,"SciDocsRR":86.18,"StackOverflowDupQuestions":52.96} -{"level_0":59,"index":9,"Rank":60,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":58.24,"AskUbuntuDupQuestions":63.24,"MindSmallReranking":31.48,"SciDocsRR":84.68,"StackOverflowDupQuestions":53.56} -{"level_0":60,"index":178,"Rank":61,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.14,"AskUbuntuDupQuestions":64.13,"MindSmallReranking":32.92,"SciDocsRR":83.68,"StackOverflowDupQuestions":51.84} -{"level_0":61,"index":229,"Rank":62,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":58.04,"AskUbuntuDupQuestions":63.48,"MindSmallReranking":30.8,"SciDocsRR":87.12,"StackOverflowDupQuestions":50.76} -{"level_0":62,"index":192,"Rank":63,"Model":"all-MiniLM-L6-v2-ds<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.04,"AskUbuntuDupQuestions":63.48,"MindSmallReranking":30.8,"SciDocsRR":87.12,"StackOverflowDupQuestions":50.76} -{"level_0":63,"index":34,"Rank":64,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.01,"AskUbuntuDupQuestions":61.7,"MindSmallReranking":32.67,"SciDocsRR":85.2,"StackOverflowDupQuestions":52.48} 
-{"level_0":64,"index":282,"Rank":65,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.99,"AskUbuntuDupQuestions":64.61,"MindSmallReranking":29.63,"SciDocsRR":84.25,"StackOverflowDupQuestions":53.46} -{"level_0":65,"index":116,"Rank":66,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.95,"AskUbuntuDupQuestions":62.97,"MindSmallReranking":31.31,"SciDocsRR":87.14,"StackOverflowDupQuestions":50.36} -{"level_0":66,"index":202,"Rank":67,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.86,"AskUbuntuDupQuestions":62.4,"MindSmallReranking":31.28,"SciDocsRR":85.01,"StackOverflowDupQuestions":52.75} -{"level_0":67,"index":36,"Rank":68,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.86,"AskUbuntuDupQuestions":62.13,"MindSmallReranking":32.59,"SciDocsRR":84.31,"StackOverflowDupQuestions":52.4} -{"level_0":68,"index":0,"Rank":69,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":57.78,"AskUbuntuDupQuestions":63.84,"MindSmallReranking":31.89,"SciDocsRR":81.62,"StackOverflowDupQuestions":53.76} -{"level_0":69,"index":254,"Rank":70,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.7,"AskUbuntuDupQuestions":61.72,"MindSmallReranking":32.21,"SciDocsRR":84.83,"StackOverflowDupQuestions":52.03} -{"level_0":70,"index":18,"Rank":71,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.66,"AskUbuntuDupQuestions":61.83,"MindSmallReranking":31.34,"SciDocsRR":85.17,"StackOverflowDupQuestions":52.29} -{"level_0":71,"index":148,"Rank":72,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":57.54,"AskUbuntuDupQuestions":64.3,"MindSmallReranking":31.68,"SciDocsRR":82.0,"StackOverflowDupQuestions":52.17} -{"level_0":72,"index":112,"Rank":73,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.5,"AskUbuntuDupQuestions":63.29,"MindSmallReranking":31.3,"SciDocsRR":80.67,"StackOverflowDupQuestions":54.75} -{"level_0":73,"index":60,"Rank":74,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":57.38,"AskUbuntuDupQuestions":63.13,"MindSmallReranking":31.34,"SciDocsRR":84.03,"StackOverflowDupQuestions":51.02} -{"level_0":74,"index":149,"Rank":75,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":57.29,"AskUbuntuDupQuestions":65.35,"MindSmallReranking":31.81,"SciDocsRR":79.49,"StackOverflowDupQuestions":52.52} -{"level_0":75,"index":199,"Rank":76,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.18,"AskUbuntuDupQuestions":60.23,"MindSmallReranking":31.71,"SciDocsRR":84.46,"StackOverflowDupQuestions":52.32} -{"level_0":76,"index":185,"Rank":77,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.12,"AskUbuntuDupQuestions":62.73,"MindSmallReranking":30.81,"SciDocsRR":85.11,"StackOverflowDupQuestions":49.85} -{"level_0":77,"index":42,"Rank":78,"Model":"GritLM-7B<\/a>","Model Size 
(Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":57.09,"AskUbuntuDupQuestions":61.11,"MindSmallReranking":31.53,"SciDocsRR":84.78,"StackOverflowDupQuestions":50.95} -{"level_0":78,"index":175,"Rank":79,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":56.98,"AskUbuntuDupQuestions":62.25,"MindSmallReranking":30.54,"SciDocsRR":83.1,"StackOverflowDupQuestions":52.05} -{"level_0":79,"index":135,"Rank":80,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.98,"AskUbuntuDupQuestions":62.25,"MindSmallReranking":30.54,"SciDocsRR":83.1,"StackOverflowDupQuestions":52.05} -{"level_0":80,"index":284,"Rank":81,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.72,"AskUbuntuDupQuestions":62.18,"MindSmallReranking":29.93,"SciDocsRR":83.25,"StackOverflowDupQuestions":51.53} -{"level_0":81,"index":238,"Rank":82,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":56.66,"AskUbuntuDupQuestions":63.23,"MindSmallReranking":31.93,"SciDocsRR":77.96,"StackOverflowDupQuestions":53.5} -{"level_0":82,"index":93,"Rank":83,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.61,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":31.83,"SciDocsRR":84.31,"StackOverflowDupQuestions":50.68} -{"level_0":83,"index":155,"Rank":84,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":56.61,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":31.83,"SciDocsRR":84.31,"StackOverflowDupQuestions":50.68} -{"level_0":84,"index":83,"Rank":85,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.56,"AskUbuntuDupQuestions":61.63,"MindSmallReranking":32.29,"SciDocsRR":80.79,"StackOverflowDupQuestions":51.53} -{"level_0":85,"index":154,"Rank":86,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":56.53,"AskUbuntuDupQuestions":60.07,"MindSmallReranking":30.78,"SciDocsRR":83.94,"StackOverflowDupQuestions":51.33} -{"level_0":86,"index":246,"Rank":87,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":56.42,"AskUbuntuDupQuestions":66.16,"MindSmallReranking":30.6,"SciDocsRR":76.09,"StackOverflowDupQuestions":52.85} -{"level_0":87,"index":172,"Rank":88,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.42,"AskUbuntuDupQuestions":62.83,"MindSmallReranking":31.48,"SciDocsRR":80.97,"StackOverflowDupQuestions":50.38} -{"level_0":88,"index":281,"Rank":89,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.32,"AskUbuntuDupQuestions":62.05,"MindSmallReranking":31.45,"SciDocsRR":81.22,"StackOverflowDupQuestions":50.54} -{"level_0":89,"index":147,"Rank":90,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.2,"AskUbuntuDupQuestions":63.17,"MindSmallReranking":31.82,"SciDocsRR":78.83,"StackOverflowDupQuestions":50.99} -{"level_0":90,"index":33,"Rank":91,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":56.09,"AskUbuntuDupQuestions":60.11,"MindSmallReranking":31.87,"SciDocsRR":81.62,"StackOverflowDupQuestions":50.76} -{"level_0":91,"index":237,"Rank":92,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":55.96,"AskUbuntuDupQuestions":63.08,"MindSmallReranking":31.5,"SciDocsRR":76.49,"StackOverflowDupQuestions":52.79} -{"level_0":92,"index":169,"Rank":93,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.91,"AskUbuntuDupQuestions":61.6,"MindSmallReranking":31.22,"SciDocsRR":80.27,"StackOverflowDupQuestions":50.55} -{"level_0":93,"index":140,"Rank":94,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":55.91,"AskUbuntuDupQuestions":58.98,"MindSmallReranking":31.29,"SciDocsRR":83.02,"StackOverflowDupQuestions":50.34} -{"level_0":94,"index":153,"Rank":95,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":55.91,"AskUbuntuDupQuestions":58.98,"MindSmallReranking":31.29,"SciDocsRR":83.02,"StackOverflowDupQuestions":50.34} -{"level_0":95,"index":160,"Rank":96,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":55.86,"AskUbuntuDupQuestions":60.28,"MindSmallReranking":31.42,"SciDocsRR":82.04,"StackOverflowDupQuestions":49.72} -{"level_0":96,"index":171,"Rank":97,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.84,"AskUbuntuDupQuestions":62.4,"MindSmallReranking":31.56,"SciDocsRR":79.31,"StackOverflowDupQuestions":50.11} -{"level_0":97,"index":213,"Rank":98,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.78,"AskUbuntuDupQuestions":61.71,"MindSmallReranking":30.31,"SciDocsRR":80.61,"StackOverflowDupQuestions":50.47} -{"level_0":98,"index":107,"Rank":99,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.77,"AskUbuntuDupQuestions":59.83,"MindSmallReranking":32.06,"SciDocsRR":81.54,"StackOverflowDupQuestions":49.65} -{"level_0":99,"index":152,"Rank":100,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":55.7,"AskUbuntuDupQuestions":59.66,"MindSmallReranking":30.07,"SciDocsRR":82.9,"StackOverflowDupQuestions":50.15} -{"level_0":100,"index":28,"Rank":101,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"level_0":101,"index":26,"Rank":102,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"level_0":102,"index":29,"Rank":103,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"level_0":103,"index":27,"Rank":104,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} 
-{"level_0":104,"index":206,"Rank":105,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"level_0":105,"index":129,"Rank":106,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":55.69,"AskUbuntuDupQuestions":60.6,"MindSmallReranking":30.81,"SciDocsRR":81.45,"StackOverflowDupQuestions":49.9} -{"level_0":106,"index":123,"Rank":107,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.65,"AskUbuntuDupQuestions":59.97,"MindSmallReranking":31.79,"SciDocsRR":79.77,"StackOverflowDupQuestions":51.07} -{"level_0":107,"index":211,"Rank":108,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":55.65,"AskUbuntuDupQuestions":61.6,"MindSmallReranking":30.34,"SciDocsRR":80.33,"StackOverflowDupQuestions":50.32} -{"level_0":108,"index":184,"Rank":109,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.6,"AskUbuntuDupQuestions":61.11,"MindSmallReranking":28.83,"SciDocsRR":85.93,"StackOverflowDupQuestions":46.52} -{"level_0":109,"index":183,"Rank":110,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.5,"AskUbuntuDupQuestions":58.96,"MindSmallReranking":29.32,"SciDocsRR":88.03,"StackOverflowDupQuestions":45.71} -{"level_0":110,"index":101,"Rank":111,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.47,"AskUbuntuDupQuestions":61.36,"MindSmallReranking":29.91,"SciDocsRR":79.23,"StackOverflowDupQuestions":51.38} -{"level_0":111,"index":99,"Rank":112,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.46,"AskUbuntuDupQuestions":60.35,"MindSmallReranking":29.08,"SciDocsRR":81.56,"StackOverflowDupQuestions":50.87} -{"level_0":112,"index":121,"Rank":113,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.46,"AskUbuntuDupQuestions":60.35,"MindSmallReranking":29.08,"SciDocsRR":81.56,"StackOverflowDupQuestions":50.87} -{"level_0":113,"index":100,"Rank":114,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.42,"AskUbuntuDupQuestions":60.53,"MindSmallReranking":29.17,"SciDocsRR":80.81,"StackOverflowDupQuestions":51.19} -{"level_0":114,"index":66,"Rank":115,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":55.38,"AskUbuntuDupQuestions":60.71,"MindSmallReranking":31.96,"SciDocsRR":79.23,"StackOverflowDupQuestions":49.61} -{"level_0":115,"index":236,"Rank":116,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":55.36,"AskUbuntuDupQuestions":61.64,"MindSmallReranking":31.84,"SciDocsRR":76.39,"StackOverflowDupQuestions":51.58} -{"level_0":116,"index":69,"Rank":117,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.34,"AskUbuntuDupQuestions":59.7,"MindSmallReranking":31.78,"SciDocsRR":80.04,"StackOverflowDupQuestions":49.83} 
-{"level_0":117,"index":176,"Rank":118,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.32,"AskUbuntuDupQuestions":61.19,"MindSmallReranking":30.61,"SciDocsRR":79.3,"StackOverflowDupQuestions":50.18} -{"level_0":118,"index":103,"Rank":119,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.31,"AskUbuntuDupQuestions":61.15,"MindSmallReranking":30.6,"SciDocsRR":79.34,"StackOverflowDupQuestions":50.16} -{"level_0":119,"index":128,"Rank":120,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.24,"AskUbuntuDupQuestions":60.35,"MindSmallReranking":30.98,"SciDocsRR":81.45,"StackOverflowDupQuestions":48.17} -{"level_0":120,"index":210,"Rank":121,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":55.18,"AskUbuntuDupQuestions":61.34,"MindSmallReranking":30.04,"SciDocsRR":79.4,"StackOverflowDupQuestions":49.95} -{"level_0":121,"index":177,"Rank":122,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.09,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":30.99,"SciDocsRR":79.76,"StackOverflowDupQuestions":49.99} -{"level_0":122,"index":136,"Rank":123,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.09,"AskUbuntuDupQuestions":59.62,"MindSmallReranking":30.99,"SciDocsRR":79.76,"StackOverflowDupQuestions":49.99} -{"level_0":123,"index":35,"Rank":124,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.06,"AskUbuntuDupQuestions":58.54,"MindSmallReranking":31.36,"SciDocsRR":79.9,"StackOverflowDupQuestions":50.45} -{"level_0":124,"index":208,"Rank":125,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.99,"AskUbuntuDupQuestions":59.57,"MindSmallReranking":30.6,"SciDocsRR":79.91,"StackOverflowDupQuestions":49.87} -{"level_0":125,"index":167,"Rank":126,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.9,"AskUbuntuDupQuestions":59.92,"MindSmallReranking":30.97,"SciDocsRR":78.62,"StackOverflowDupQuestions":50.07} -{"level_0":126,"index":159,"Rank":127,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.84,"AskUbuntuDupQuestions":58.23,"MindSmallReranking":30.97,"SciDocsRR":80.74,"StackOverflowDupQuestions":49.41} -{"level_0":127,"index":245,"Rank":128,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":54.71,"AskUbuntuDupQuestions":62.86,"MindSmallReranking":29.77,"SciDocsRR":75.16,"StackOverflowDupQuestions":51.05} -{"level_0":128,"index":82,"Rank":129,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.67,"AskUbuntuDupQuestions":59.63,"MindSmallReranking":31.72,"SciDocsRR":77.72,"StackOverflowDupQuestions":49.61} -{"level_0":129,"index":174,"Rank":130,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":54.64,"AskUbuntuDupQuestions":60.56,"MindSmallReranking":30.4,"SciDocsRR":78.09,"StackOverflowDupQuestions":49.5} -{"level_0":130,"index":132,"Rank":131,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.63,"AskUbuntuDupQuestions":61.05,"MindSmallReranking":30.55,"SciDocsRR":79.83,"StackOverflowDupQuestions":47.1} -{"level_0":131,"index":134,"Rank":132,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":54.58,"AskUbuntuDupQuestions":63.09,"MindSmallReranking":30.89,"SciDocsRR":74.28,"StackOverflowDupQuestions":50.06} -{"level_0":132,"index":209,"Rank":133,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":54.56,"AskUbuntuDupQuestions":61.16,"MindSmallReranking":30.02,"SciDocsRR":78.05,"StackOverflowDupQuestions":49.0} -{"level_0":133,"index":267,"Rank":134,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.46,"AskUbuntuDupQuestions":57.69,"MindSmallReranking":30.32,"SciDocsRR":79.91,"StackOverflowDupQuestions":49.93} -{"level_0":134,"index":157,"Rank":135,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":54.45,"AskUbuntuDupQuestions":59.36,"MindSmallReranking":29.56,"SciDocsRR":79.76,"StackOverflowDupQuestions":49.13} -{"level_0":135,"index":104,"Rank":136,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.4,"AskUbuntuDupQuestions":60.25,"MindSmallReranking":30.26,"SciDocsRR":76.94,"StackOverflowDupQuestions":50.16} -{"level_0":136,"index":158,"Rank":137,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"AskUbuntuDupQuestions":58.11,"MindSmallReranking":30.1,"SciDocsRR":79.16,"StackOverflowDupQuestions":49.93} -{"level_0":137,"index":106,"Rank":138,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.29,"AskUbuntuDupQuestions":58.08,"MindSmallReranking":31.07,"SciDocsRR":78.57,"StackOverflowDupQuestions":49.43} -{"level_0":138,"index":262,"Rank":139,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.29,"AskUbuntuDupQuestions":57.59,"MindSmallReranking":31.29,"SciDocsRR":82.14,"StackOverflowDupQuestions":46.13} -{"level_0":139,"index":166,"Rank":140,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.28,"AskUbuntuDupQuestions":59.5,"MindSmallReranking":30.51,"SciDocsRR":78.11,"StackOverflowDupQuestions":49.0} -{"level_0":140,"index":105,"Rank":141,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.28,"AskUbuntuDupQuestions":58.09,"MindSmallReranking":31.29,"SciDocsRR":78.78,"StackOverflowDupQuestions":48.96} -{"level_0":141,"index":235,"Rank":142,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":54.23,"AskUbuntuDupQuestions":60.86,"MindSmallReranking":31.33,"SciDocsRR":73.71,"StackOverflowDupQuestions":51.01} -{"level_0":142,"index":203,"Rank":143,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.14,"AskUbuntuDupQuestions":56.97,"MindSmallReranking":31.38,"SciDocsRR":79.46,"StackOverflowDupQuestions":48.75} 
-{"level_0":143,"index":244,"Rank":144,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":54.0,"AskUbuntuDupQuestions":61.51,"MindSmallReranking":30.27,"SciDocsRR":74.88,"StackOverflowDupQuestions":49.34} -{"level_0":144,"index":79,"Rank":145,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.0,"AskUbuntuDupQuestions":58.13,"MindSmallReranking":31.34,"SciDocsRR":77.21,"StackOverflowDupQuestions":49.32} -{"level_0":145,"index":65,"Rank":146,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":53.98,"AskUbuntuDupQuestions":58.6,"MindSmallReranking":29.73,"SciDocsRR":77.81,"StackOverflowDupQuestions":49.8} -{"level_0":146,"index":44,"Rank":147,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.95,"AskUbuntuDupQuestions":58.79,"MindSmallReranking":32.02,"SciDocsRR":78.54,"StackOverflowDupQuestions":46.44} -{"level_0":147,"index":162,"Rank":148,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.87,"AskUbuntuDupQuestions":57.88,"MindSmallReranking":30.28,"SciDocsRR":78.13,"StackOverflowDupQuestions":49.2} -{"level_0":148,"index":242,"Rank":149,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":53.8,"AskUbuntuDupQuestions":60.16,"MindSmallReranking":30.15,"SciDocsRR":78.09,"StackOverflowDupQuestions":46.79} -{"level_0":149,"index":113,"Rank":150,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.64,"AskUbuntuDupQuestions":58.54,"MindSmallReranking":30.98,"SciDocsRR":77.2,"StackOverflowDupQuestions":47.85} -{"level_0":150,"index":241,"Rank":151,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.62,"AskUbuntuDupQuestions":60.49,"MindSmallReranking":30.37,"SciDocsRR":77.78,"StackOverflowDupQuestions":45.85} -{"level_0":151,"index":68,"Rank":152,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.49,"AskUbuntuDupQuestions":57.49,"MindSmallReranking":31.78,"SciDocsRR":77.89,"StackOverflowDupQuestions":46.78} -{"level_0":152,"index":212,"Rank":153,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":53.42,"AskUbuntuDupQuestions":60.79,"MindSmallReranking":29.7,"SciDocsRR":75.79,"StackOverflowDupQuestions":47.42} -{"level_0":153,"index":207,"Rank":154,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.32,"AskUbuntuDupQuestions":57.59,"MindSmallReranking":31.29,"SciDocsRR":75.51,"StackOverflowDupQuestions":48.89} -{"level_0":154,"index":285,"Rank":155,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.27,"AskUbuntuDupQuestions":58.31,"MindSmallReranking":30.75,"SciDocsRR":75.62,"StackOverflowDupQuestions":48.4} -{"level_0":155,"index":214,"Rank":156,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.14,"AskUbuntuDupQuestions":56.69,"MindSmallReranking":31.58,"SciDocsRR":76.51,"StackOverflowDupQuestions":47.78} 
-{"level_0":156,"index":63,"Rank":157,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":53.09,"AskUbuntuDupQuestions":57.16,"MindSmallReranking":30.1,"SciDocsRR":76.28,"StackOverflowDupQuestions":48.82} -{"level_0":157,"index":243,"Rank":158,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.09,"AskUbuntuDupQuestions":59.73,"MindSmallReranking":30.2,"SciDocsRR":73.96,"StackOverflowDupQuestions":48.46} -{"level_0":158,"index":173,"Rank":159,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.07,"AskUbuntuDupQuestions":60.25,"MindSmallReranking":30.68,"SciDocsRR":74.16,"StackOverflowDupQuestions":47.18} -{"level_0":159,"index":61,"Rank":160,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":52.95,"AskUbuntuDupQuestions":55.56,"MindSmallReranking":30.86,"SciDocsRR":77.62,"StackOverflowDupQuestions":47.77} -{"level_0":160,"index":168,"Rank":161,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.6,"AskUbuntuDupQuestions":57.38,"MindSmallReranking":30.52,"SciDocsRR":75.13,"StackOverflowDupQuestions":47.38} -{"level_0":161,"index":84,"Rank":162,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.33,"AskUbuntuDupQuestions":55.9,"MindSmallReranking":31.11,"SciDocsRR":77.54,"StackOverflowDupQuestions":44.77} -{"level_0":162,"index":239,"Rank":163,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":51.84,"AskUbuntuDupQuestions":58.99,"MindSmallReranking":27.13,"SciDocsRR":72.78,"StackOverflowDupQuestions":48.48} -{"level_0":163,"index":77,"Rank":164,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.68,"AskUbuntuDupQuestions":56.48,"MindSmallReranking":29.57,"SciDocsRR":74.66,"StackOverflowDupQuestions":45.99} -{"level_0":164,"index":80,"Rank":165,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.58,"AskUbuntuDupQuestions":55.84,"MindSmallReranking":30.4,"SciDocsRR":71.34,"StackOverflowDupQuestions":44.74} -{"level_0":165,"index":78,"Rank":166,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.55,"AskUbuntuDupQuestions":55.77,"MindSmallReranking":29.04,"SciDocsRR":72.91,"StackOverflowDupQuestions":44.49} -{"level_0":166,"index":72,"Rank":167,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.29,"AskUbuntuDupQuestions":54.09,"MindSmallReranking":30.28,"SciDocsRR":74.48,"StackOverflowDupQuestions":42.3} -{"level_0":167,"index":71,"Rank":168,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.41,"AskUbuntuDupQuestions":53.77,"MindSmallReranking":29.92,"SciDocsRR":70.82,"StackOverflowDupQuestions":43.14} -{"level_0":168,"index":277,"Rank":169,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.02,"AskUbuntuDupQuestions":53.49,"MindSmallReranking":30.71,"SciDocsRR":71.04,"StackOverflowDupQuestions":40.85} 
-{"level_0":169,"index":234,"Rank":170,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":48.82,"AskUbuntuDupQuestions":53.75,"MindSmallReranking":30.39,"SciDocsRR":69.22,"StackOverflowDupQuestions":41.92} -{"level_0":170,"index":70,"Rank":171,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.51,"AskUbuntuDupQuestions":52.34,"MindSmallReranking":30.09,"SciDocsRR":71.04,"StackOverflowDupQuestions":40.57} -{"level_0":171,"index":227,"Rank":172,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":48.42,"AskUbuntuDupQuestions":52.75,"MindSmallReranking":29.81,"SciDocsRR":68.72,"StackOverflowDupQuestions":42.42} -{"level_0":172,"index":231,"Rank":173,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.1,"AskUbuntuDupQuestions":50.07,"MindSmallReranking":24.8,"SciDocsRR":81.31,"StackOverflowDupQuestions":36.22} -{"level_0":173,"index":127,"Rank":174,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.85,"AskUbuntuDupQuestions":53.44,"MindSmallReranking":28.54,"SciDocsRR":68.65,"StackOverflowDupQuestions":40.76} -{"level_0":174,"index":67,"Rank":175,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":47.7,"AskUbuntuDupQuestions":52.7,"MindSmallReranking":29.52,"SciDocsRR":67.76,"StackOverflowDupQuestions":40.82} -{"level_0":175,"index":81,"Rank":176,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.56,"AskUbuntuDupQuestions":52.63,"MindSmallReranking":29.27,"SciDocsRR":68.36,"StackOverflowDupQuestions":39.97} -{"level_0":176,"index":217,"Rank":177,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":47.54,"AskUbuntuDupQuestions":51.8,"MindSmallReranking":29.3,"SciDocsRR":70.14,"StackOverflowDupQuestions":38.9} -{"level_0":177,"index":256,"Rank":178,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.31,"AskUbuntuDupQuestions":50.09,"MindSmallReranking":29.01,"SciDocsRR":70.94,"StackOverflowDupQuestions":39.18} -{"level_0":178,"index":218,"Rank":179,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":46.47,"AskUbuntuDupQuestions":51.57,"MindSmallReranking":28.62,"SciDocsRR":66.33,"StackOverflowDupQuestions":39.35} -{"level_0":179,"index":141,"Rank":180,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.18,"AskUbuntuDupQuestions":50.89,"MindSmallReranking":26.88,"SciDocsRR":68.36,"StackOverflowDupQuestions":38.61} -{"level_0":180,"index":233,"Rank":181,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":44.75,"AskUbuntuDupQuestions":50.88,"MindSmallReranking":28.92,"SciDocsRR":63.55,"StackOverflowDupQuestions":35.65} -{"level_0":181,"index":122,"Rank":182,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.44,"AskUbuntuDupQuestions":45.84,"MindSmallReranking":28.37,"SciDocsRR":64.94,"StackOverflowDupQuestions":34.62} -{"level_0":182,"index":232,"Rank":183,"Model":"glove.6B.300d<\/a>","Model Size 
(Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":43.29,"AskUbuntuDupQuestions":49.57,"MindSmallReranking":27.01,"SciDocsRR":62.56,"StackOverflowDupQuestions":34.03} -{"level_0":183,"index":11,"Rank":184,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":41.44,"AskUbuntuDupQuestions":48.99,"MindSmallReranking":24.79,"SciDocsRR":54.99,"StackOverflowDupQuestions":36.98} -{"level_0":184,"index":255,"Rank":185,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.44,"AskUbuntuDupQuestions":44.88,"MindSmallReranking":25.65,"SciDocsRR":43.9,"StackOverflowDupQuestions":23.35} -{"level_0":185,"index":263,"Rank":186,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":0.53,"AskUbuntuDupQuestions":0.56,"MindSmallReranking":0.31,"SciDocsRR":0.79,"StackOverflowDupQuestions":0.47} -{"level_0":186,"index":2,"Rank":187,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":"","MindSmallReranking":"","SciDocsRR":88.87,"StackOverflowDupQuestions":""} -{"level_0":187,"index":30,"Rank":197,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":53.99,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"level_0":188,"index":73,"Rank":217,"Model":"gte-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":55.31,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"level_0":189,"index":76,"Rank":220,"Model":"gte-micro-v4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":58.01,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"level_0":190,"index":97,"Rank":230,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":48.66,"MindSmallReranking":"","SciDocsRR":66.69,"StackOverflowDupQuestions":39.5} -{"level_0":191,"index":98,"Rank":231,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":59.82,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"level_0":192,"index":124,"Rank":235,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","AskUbuntuDupQuestions":61.53,"MindSmallReranking":"","SciDocsRR":"","StackOverflowDupQuestions":""} -{"level_0":193,"index":248,"Rank":264,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":57.91,"MindSmallReranking":"","SciDocsRR":70.21,"StackOverflowDupQuestions":44.9} -{"level_0":194,"index":251,"Rank":267,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":51.34,"MindSmallReranking":"","SciDocsRR":70.59,"StackOverflowDupQuestions":40.99} -{"level_0":195,"index":270,"Rank":277,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":60.76,"MindSmallReranking":"","SciDocsRR":84.34,"StackOverflowDupQuestions":51.68} -{"level_0":196,"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":54.68,"MindSmallReranking":"","SciDocsRR":72.78,"StackOverflowDupQuestions":40.65} -{"level_0":197,"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":55.09,"MindSmallReranking":"","SciDocsRR":70.93,"StackOverflowDupQuestions":42.42} -{"level_0":198,"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AskUbuntuDupQuestions":53.56,"MindSmallReranking":"","SciDocsRR":68.7,"StackOverflowDupQuestions":39.41} +{"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":60.13,"AskUbuntuDupQuestions":66.0,"MindSmallReranking":32.71,"SciDocsRR":87.89,"StackOverflowDupQuestions":53.93} +{"Rank":2,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.09,"AskUbuntuDupQuestions":64.92,"MindSmallReranking":30.97,"SciDocsRR":89.34,"StackOverflowDupQuestions":55.11} +{"Rank":3,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.74,"AskUbuntuDupQuestions":65.77,"MindSmallReranking":31.69,"SciDocsRR":87.03,"StackOverflowDupQuestions":54.49} +{"Rank":4,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":59.68,"AskUbuntuDupQuestions":65.19,"MindSmallReranking":32.67,"SciDocsRR":86.05,"StackOverflowDupQuestions":54.82} +{"Rank":5,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":59.36,"AskUbuntuDupQuestions":65.85,"MindSmallReranking":30.97,"SciDocsRR":88.65,"StackOverflowDupQuestions":51.98} +{"Rank":6,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.16,"AskUbuntuDupQuestions":65.03,"MindSmallReranking":29.86,"SciDocsRR":86.66,"StackOverflowDupQuestions":55.08} +{"Rank":7,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":58.9,"AskUbuntuDupQuestions":64.4,"MindSmallReranking":33.07,"SciDocsRR":83.59,"StackOverflowDupQuestions":54.56} +{"Rank":8,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":58.44,"AskUbuntuDupQuestions":64.06,"MindSmallReranking":31.02,"SciDocsRR":87.2,"StackOverflowDupQuestions":51.47} +{"Rank":9,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":58.42,"AskUbuntuDupQuestions":63.98,"MindSmallReranking":31.5,"SciDocsRR":83.8,"StackOverflowDupQuestions":54.41} +{"Rank":10,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":58.24,"AskUbuntuDupQuestions":63.24,"MindSmallReranking":31.48,"SciDocsRR":84.68,"StackOverflowDupQuestions":53.56} +{"Rank":11,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":58.04,"AskUbuntuDupQuestions":63.48,"MindSmallReranking":30.8,"SciDocsRR":87.12,"StackOverflowDupQuestions":50.76} +{"Rank":12,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":57.99,"AskUbuntuDupQuestions":64.61,"MindSmallReranking":29.63,"SciDocsRR":84.25,"StackOverflowDupQuestions":53.46} +{"Rank":13,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":57.78,"AskUbuntuDupQuestions":63.84,"MindSmallReranking":31.89,"SciDocsRR":81.62,"StackOverflowDupQuestions":53.76} +{"Rank":14,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":57.38,"AskUbuntuDupQuestions":63.13,"MindSmallReranking":31.34,"SciDocsRR":84.03,"StackOverflowDupQuestions":51.02} +{"Rank":15,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":57.09,"AskUbuntuDupQuestions":61.11,"MindSmallReranking":31.53,"SciDocsRR":84.78,"StackOverflowDupQuestions":50.95} +{"Rank":16,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.72,"AskUbuntuDupQuestions":62.18,"MindSmallReranking":29.93,"SciDocsRR":83.25,"StackOverflowDupQuestions":51.53} +{"Rank":17,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":56.66,"AskUbuntuDupQuestions":63.23,"MindSmallReranking":31.93,"SciDocsRR":77.96,"StackOverflowDupQuestions":53.5} +{"Rank":18,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":56.42,"AskUbuntuDupQuestions":66.16,"MindSmallReranking":30.6,"SciDocsRR":76.09,"StackOverflowDupQuestions":52.85} +{"Rank":19,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.32,"AskUbuntuDupQuestions":62.05,"MindSmallReranking":31.45,"SciDocsRR":81.22,"StackOverflowDupQuestions":50.54} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":55.96,"AskUbuntuDupQuestions":63.08,"MindSmallReranking":31.5,"SciDocsRR":76.49,"StackOverflowDupQuestions":52.79} +{"Rank":21,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":55.96,"AskUbuntuDupQuestions":59.24,"MindSmallReranking":30.24,"SciDocsRR":84.22,"StackOverflowDupQuestions":50.14} +{"Rank":22,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":55.65,"AskUbuntuDupQuestions":61.6,"MindSmallReranking":30.34,"SciDocsRR":80.33,"StackOverflowDupQuestions":50.32} +{"Rank":23,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":55.38,"AskUbuntuDupQuestions":60.71,"MindSmallReranking":31.96,"SciDocsRR":79.23,"StackOverflowDupQuestions":49.61} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":55.36,"AskUbuntuDupQuestions":61.64,"MindSmallReranking":31.84,"SciDocsRR":76.39,"StackOverflowDupQuestions":51.58} +{"Rank":25,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":55.18,"AskUbuntuDupQuestions":61.34,"MindSmallReranking":30.04,"SciDocsRR":79.4,"StackOverflowDupQuestions":49.95} +{"Rank":26,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":55.03,"AskUbuntuDupQuestions":59.28,"MindSmallReranking":29.28,"SciDocsRR":81.81,"StackOverflowDupQuestions":49.75} 
+{"Rank":27,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":54.71,"AskUbuntuDupQuestions":62.86,"MindSmallReranking":29.77,"SciDocsRR":75.16,"StackOverflowDupQuestions":51.05} +{"Rank":28,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":54.56,"AskUbuntuDupQuestions":61.16,"MindSmallReranking":30.02,"SciDocsRR":78.05,"StackOverflowDupQuestions":49.0} +{"Rank":29,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":54.23,"AskUbuntuDupQuestions":60.86,"MindSmallReranking":31.33,"SciDocsRR":73.71,"StackOverflowDupQuestions":51.01} +{"Rank":30,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":54.0,"AskUbuntuDupQuestions":61.51,"MindSmallReranking":30.27,"SciDocsRR":74.88,"StackOverflowDupQuestions":49.34} +{"Rank":31,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":53.98,"AskUbuntuDupQuestions":58.6,"MindSmallReranking":29.73,"SciDocsRR":77.81,"StackOverflowDupQuestions":49.8} +{"Rank":32,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":53.8,"AskUbuntuDupQuestions":60.16,"MindSmallReranking":30.15,"SciDocsRR":78.09,"StackOverflowDupQuestions":46.78} +{"Rank":33,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.62,"AskUbuntuDupQuestions":60.49,"MindSmallReranking":30.37,"SciDocsRR":77.78,"StackOverflowDupQuestions":45.85} +{"Rank":34,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":53.42,"AskUbuntuDupQuestions":60.79,"MindSmallReranking":29.7,"SciDocsRR":75.79,"StackOverflowDupQuestions":47.42} +{"Rank":35,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.27,"AskUbuntuDupQuestions":58.31,"MindSmallReranking":30.75,"SciDocsRR":75.62,"StackOverflowDupQuestions":48.4} +{"Rank":36,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.14,"AskUbuntuDupQuestions":56.69,"MindSmallReranking":31.58,"SciDocsRR":76.51,"StackOverflowDupQuestions":47.78} +{"Rank":37,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":53.09,"AskUbuntuDupQuestions":57.16,"MindSmallReranking":30.1,"SciDocsRR":76.28,"StackOverflowDupQuestions":48.82} +{"Rank":38,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":53.09,"AskUbuntuDupQuestions":59.73,"MindSmallReranking":30.2,"SciDocsRR":73.96,"StackOverflowDupQuestions":48.46} +{"Rank":39,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":52.95,"AskUbuntuDupQuestions":55.56,"MindSmallReranking":30.86,"SciDocsRR":77.62,"StackOverflowDupQuestions":47.77} +{"Rank":40,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":52.9,"AskUbuntuDupQuestions":56.42,"MindSmallReranking":29.96,"SciDocsRR":78.26,"StackOverflowDupQuestions":46.97} +{"Rank":41,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":51.84,"AskUbuntuDupQuestions":58.99,"MindSmallReranking":27.13,"SciDocsRR":72.78,"StackOverflowDupQuestions":48.48} +{"Rank":42,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.02,"AskUbuntuDupQuestions":53.49,"MindSmallReranking":30.71,"SciDocsRR":71.04,"StackOverflowDupQuestions":40.85} +{"Rank":43,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":48.82,"AskUbuntuDupQuestions":53.75,"MindSmallReranking":30.39,"SciDocsRR":69.22,"StackOverflowDupQuestions":41.92} +{"Rank":44,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":48.42,"AskUbuntuDupQuestions":52.75,"MindSmallReranking":29.81,"SciDocsRR":68.72,"StackOverflowDupQuestions":42.42} +{"Rank":45,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.1,"AskUbuntuDupQuestions":50.07,"MindSmallReranking":24.8,"SciDocsRR":81.31,"StackOverflowDupQuestions":36.22} +{"Rank":46,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":47.7,"AskUbuntuDupQuestions":52.7,"MindSmallReranking":29.52,"SciDocsRR":67.76,"StackOverflowDupQuestions":40.82} +{"Rank":47,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":47.54,"AskUbuntuDupQuestions":51.8,"MindSmallReranking":29.3,"SciDocsRR":70.14,"StackOverflowDupQuestions":38.9} +{"Rank":48,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":46.47,"AskUbuntuDupQuestions":51.57,"MindSmallReranking":28.62,"SciDocsRR":66.33,"StackOverflowDupQuestions":39.35} +{"Rank":49,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":44.75,"AskUbuntuDupQuestions":50.88,"MindSmallReranking":28.92,"SciDocsRR":63.55,"StackOverflowDupQuestions":35.65} +{"Rank":50,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.44,"AskUbuntuDupQuestions":45.84,"MindSmallReranking":28.37,"SciDocsRR":64.94,"StackOverflowDupQuestions":34.62} +{"Rank":51,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":43.29,"AskUbuntuDupQuestions":49.57,"MindSmallReranking":27.01,"SciDocsRR":62.56,"StackOverflowDupQuestions":34.03} +{"Rank":52,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":41.44,"AskUbuntuDupQuestions":48.99,"MindSmallReranking":24.79,"SciDocsRR":54.99,"StackOverflowDupQuestions":36.98} +{"Rank":53,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":88.87,"StackOverflowDupQuestions":null} +{"Rank":54,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":55,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":56,"Model":"voyage-code-2<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":57,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":58,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":59,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":60,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":61,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":62,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":63,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":64,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":65,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":66,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":67,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":68,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":69,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":70,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} 
+{"Rank":71,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":72,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":73,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":74,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":75,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":76,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":77,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":78,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":79,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":80,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":81,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":82,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":83,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":84,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":85,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} 
+{"Rank":86,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":87,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":88,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":89,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":90,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":91,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":92,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":93,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":94,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":95,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":96,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":97,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":98,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":99,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":null,"MindSmallReranking":null,"SciDocsRR":null,"StackOverflowDupQuestions":null} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":54.68,"MindSmallReranking":null,"SciDocsRR":72.78,"StackOverflowDupQuestions":40.65} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":55.09,"MindSmallReranking":null,"SciDocsRR":70.93,"StackOverflowDupQuestions":42.42} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AskUbuntuDupQuestions":53.56,"MindSmallReranking":null,"SciDocsRR":68.7,"StackOverflowDupQuestions":39.41} diff --git a/boards_data/en/data_tasks/Retrieval/default.jsonl b/boards_data/en/data_tasks/Retrieval/default.jsonl index 138c5a154bb29cc7ee8164e2948c45a1d9e2b86d..1ccead49dd7b63ded430442ce2d7dcd308d21cd8 100644 --- a/boards_data/en/data_tasks/Retrieval/default.jsonl +++ b/boards_data/en/data_tasks/Retrieval/default.jsonl @@ -1,196 +1,111 @@ -{"level_0":0,"index":21,"Rank":1,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.16,"ArguAna":83.08,"ClimateFEVER":45.43,"CQADupstackRetrieval":47.31,"DBPedia":51.63,"FEVER":92.83,"FiQA2018":59.67,"HotpotQA":85.14,"MSMARCO":46.79,"NFCorpus":41.85,"NQ":73.88,"QuoraRetrieval":90.95,"SCIDOCS":25.26,"SciFact":79.09,"Touche2020":30.48,"TRECCOVID":79.08} 
-{"level_0":1,"index":138,"Rank":2,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.01,"ArguAna":65.27,"ClimateFEVER":46.11,"CQADupstackRetrieval":47.75,"DBPedia":52.28,"FEVER":94.83,"FiQA2018":60.48,"HotpotQA":76.67,"MSMARCO":45.22,"NFCorpus":42.0,"NQ":71.8,"QuoraRetrieval":90.03,"SCIDOCS":26.64,"SciFact":80.09,"Touche2020":29.94,"TRECCOVID":85.98} -{"level_0":2,"index":216,"Rank":3,"Model":"NV-Retriever-v1<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.9,"ArguAna":68.28,"ClimateFEVER":43.47,"CQADupstackRetrieval":49.36,"DBPedia":50.82,"FEVER":93.15,"FiQA2018":61.18,"HotpotQA":79.12,"MSMARCO":44.89,"NFCorpus":45.06,"NQ":72.44,"QuoraRetrieval":88.78,"SCIDOCS":22.55,"SciFact":81.31,"Touche2020":26.6,"TRECCOVID":86.44} -{"level_0":3,"index":126,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.25,"ArguAna":64.27,"ClimateFEVER":45.88,"CQADupstackRetrieval":46.43,"DBPedia":52.42,"FEVER":95.11,"FiQA2018":62.03,"HotpotQA":73.08,"MSMARCO":45.98,"NFCorpus":40.6,"NQ":67.0,"QuoraRetrieval":90.09,"SCIDOCS":28.91,"SciFact":79.06,"Touche2020":30.57,"TRECCOVID":82.26} -{"level_0":4,"index":205,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.25,"ArguAna":64.27,"ClimateFEVER":45.88,"CQADupstackRetrieval":46.43,"DBPedia":52.42,"FEVER":95.11,"FiQA2018":62.03,"HotpotQA":73.08,"MSMARCO":45.98,"NFCorpus":40.6,"NQ":67.0,"QuoraRetrieval":90.09,"SCIDOCS":28.91,"SciFact":79.06,"Touche2020":30.57,"TRECCOVID":82.26} -{"level_0":5,"index":17,"Rank":6,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":60.25,"ArguAna":64.27,"ClimateFEVER":45.88,"CQADupstackRetrieval":46.43,"DBPedia":52.42,"FEVER":95.11,"FiQA2018":62.03,"HotpotQA":73.08,"MSMARCO":45.98,"NFCorpus":40.6,"NQ":67.0,"QuoraRetrieval":90.09,"SCIDOCS":28.91,"SciFact":79.06,"Touche2020":30.57,"TRECCOVID":82.26} -{"level_0":6,"index":58,"Rank":7,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":60.19,"ArguAna":69.65,"ClimateFEVER":39.11,"CQADupstackRetrieval":47.27,"DBPedia":51.32,"FEVER":92.42,"FiQA2018":61.2,"HotpotQA":76.24,"MSMARCO":45.21,"NFCorpus":41.62,"NQ":70.63,"QuoraRetrieval":90.27,"SCIDOCS":21.93,"SciFact":78.32,"Touche2020":30.61,"TRECCOVID":87.1} -{"level_0":7,"index":95,"Rank":8,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.18,"ArguAna":62.34,"ClimateFEVER":34.43,"CQADupstackRetrieval":46.11,"DBPedia":51.21,"FEVER":92.16,"FiQA2018":61.77,"HotpotQA":81.36,"MSMARCO":42.18,"NFCorpus":41.34,"NQ":73.96,"QuoraRetrieval":89.58,"SCIDOCS":24.87,"SciFact":85.91,"Touche2020":28.18,"TRECCOVID":87.27} -{"level_0":8,"index":215,"Rank":9,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":59.36,"ArguAna":68.2,"ClimateFEVER":34.72,"CQADupstackRetrieval":50.51,"DBPedia":48.29,"FEVER":87.77,"FiQA2018":63.1,"HotpotQA":79.92,"MSMARCO":46.49,"NFCorpus":38.04,"NQ":71.22,"QuoraRetrieval":89.21,"SCIDOCS":20.19,"SciFact":78.43,"Touche2020":28.38,"TRECCOVID":85.88} -{"level_0":9,"index":23,"Rank":10,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":59.24,"ArguAna":77.37,"ClimateFEVER":39.37,"CQADupstackRetrieval":47.94,"DBPedia":51.37,"FEVER":90.38,"FiQA2018":60.04,"HotpotQA":83.26,"MSMARCO":45.71,"NFCorpus":38.11,"NQ":71.45,"QuoraRetrieval":90.04,"SCIDOCS":26.93,"SciFact":72.05,"Touche2020":30.26,"TRECCOVID":64.27} -{"level_0":10,"index":96,"Rank":11,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":59.0,"ArguAna":67.17,"ClimateFEVER":36.41,"CQADupstackRetrieval":46.49,"DBPedia":49.06,"FEVER":89.35,"FiQA2018":60.4,"HotpotQA":77.02,"MSMARCO":43.41,"NFCorpus":41.88,"NQ":69.92,"QuoraRetrieval":89.78,"SCIDOCS":19.91,"SciFact":77.66,"Touche2020":29.0,"TRECCOVID":87.6} -{"level_0":11,"index":139,"Rank":12,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.97,"ArguAna":64.24,"ClimateFEVER":43.53,"CQADupstackRetrieval":44.36,"DBPedia":49.88,"FEVER":90.99,"FiQA2018":56.06,"HotpotQA":71.74,"MSMARCO":43.69,"NFCorpus":41.49,"NQ":69.07,"QuoraRetrieval":89.58,"SCIDOCS":25.04,"SciFact":78.23,"Touche2020":31.45,"TRECCOVID":85.21} -{"level_0":12,"index":25,"Rank":13,"Model":"e5-R-mistral-7b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.65,"ArguAna":58.99,"ClimateFEVER":40.26,"CQADupstackRetrieval":46.59,"DBPedia":51.03,"FEVER":90.32,"FiQA2018":58.68,"HotpotQA":80.16,"MSMARCO":42.9,"NFCorpus":41.38,"NQ":69.84,"QuoraRetrieval":90.11,"SCIDOCS":19.26,"SciFact":78.92,"Touche2020":26.89,"TRECCOVID":84.4} -{"level_0":13,"index":16,"Rank":14,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.29,"ArguAna":69.72,"ClimateFEVER":42.91,"CQADupstackRetrieval":44.76,"DBPedia":48.69,"FEVER":91.57,"FiQA2018":54.7,"HotpotQA":68.95,"MSMARCO":43.36,"NFCorpus":39.34,"NQ":64.0,"QuoraRetrieval":89.64,"SCIDOCS":24.98,"SciFact":78.44,"Touche2020":27.89,"TRECCOVID":85.38} -{"level_0":14,"index":204,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.29,"ArguAna":69.72,"ClimateFEVER":42.91,"CQADupstackRetrieval":44.76,"DBPedia":48.69,"FEVER":91.57,"FiQA2018":54.7,"HotpotQA":68.95,"MSMARCO":43.36,"NFCorpus":39.34,"NQ":64.0,"QuoraRetrieval":89.64,"SCIDOCS":24.98,"SciFact":78.44,"Touche2020":27.89,"TRECCOVID":85.38} -{"level_0":15,"index":6,"Rank":16,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.28,"ArguAna":64.06,"ClimateFEVER":32.65,"CQADupstackRetrieval":46.6,"DBPedia":46.03,"FEVER":91.47,"FiQA2018":59.76,"HotpotQA":70.86,"MSMARCO":40.6,"NFCorpus":40.32,"NQ":65.92,"QuoraRetrieval":87.4,"SCIDOCS":24.32,"SciFact":79.99,"Touche2020":39.16,"TRECCOVID":85.07} -{"level_0":16,"index":51,"Rank":17,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.12,"ArguAna":67.21,"ClimateFEVER":32.3,"CQADupstackRetrieval":49.11,"DBPedia":48.05,"FEVER":89.46,"FiQA2018":58.94,"HotpotQA":78.87,"MSMARCO":42.0,"NFCorpus":42.6,"NQ":68.36,"QuoraRetrieval":89.02,"SCIDOCS":27.69,"SciFact":78.82,"Touche2020":24.06,"TRECCOVID":75.33} -{"level_0":17,"index":19,"Rank":18,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":57.91,"ArguAna":72.11,"ClimateFEVER":48.36,"CQADupstackRetrieval":42.16,"DBPedia":46.3,"FEVER":93.81,"FiQA2018":63.23,"HotpotQA":68.18,"MSMARCO":42.93,"NFCorpus":36.95,"NQ":56.08,"QuoraRetrieval":89.67,"SCIDOCS":26.35,"SciFact":82.43,"Touche2020":22.55,"TRECCOVID":77.49} -{"level_0":18,"index":42,"Rank":19,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":57.36,"ArguAna":63.17,"ClimateFEVER":30.91,"CQADupstackRetrieval":49.42,"DBPedia":46.6,"FEVER":82.74,"FiQA2018":59.91,"HotpotQA":79.4,"MSMARCO":41.96,"NFCorpus":40.86,"NQ":70.3,"QuoraRetrieval":89.47,"SCIDOCS":24.4,"SciFact":79.13,"Touche2020":27.81,"TRECCOVID":74.36} -{"level_0":19,"index":219,"Rank":20,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.05,"ArguAna":58.82,"ClimateFEVER":32.47,"CQADupstackRetrieval":49.52,"DBPedia":48.99,"FEVER":90.88,"FiQA2018":53.22,"HotpotQA":77.7,"MSMARCO":40.66,"NFCorpus":41.33,"NQ":64.67,"QuoraRetrieval":88.86,"SCIDOCS":23.01,"SciFact":79.62,"Touche2020":25.24,"TRECCOVID":80.75} -{"level_0":20,"index":156,"Rank":21,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":56.89,"ArguAna":61.88,"ClimateFEVER":38.35,"CQADupstackRetrieval":42.97,"DBPedia":48.89,"FEVER":87.84,"FiQA2018":56.59,"HotpotQA":75.72,"MSMARCO":43.06,"NFCorpus":38.62,"NQ":63.53,"QuoraRetrieval":89.61,"SCIDOCS":16.3,"SciFact":76.41,"Touche2020":26.39,"TRECCOVID":87.25} -{"level_0":21,"index":62,"Rank":22,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":56.63,"ArguAna":62.78,"ClimateFEVER":34.27,"CQADupstackRetrieval":48.25,"DBPedia":48.34,"FEVER":90.2,"FiQA2018":55.33,"HotpotQA":71.76,"MSMARCO":43.24,"NFCorpus":41.83,"NQ":64.21,"QuoraRetrieval":87.16,"SCIDOCS":22.96,"SciFact":78.22,"Touche2020":20.5,"TRECCOVID":80.34} -{"level_0":22,"index":9,"Rank":23,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":56.6,"ArguAna":70.28,"ClimateFEVER":31.95,"CQADupstackRetrieval":46.2,"DBPedia":39.79,"FEVER":91.35,"FiQA2018":52.51,"HotpotQA":75.51,"MSMARCO":37.93,"NFCorpus":43.7,"NQ":64.26,"QuoraRetrieval":87.62,"SCIDOCS":20.24,"SciFact":79.91,"Touche2020":26.8,"TRECCOVID":81.02} -{"level_0":23,"index":15,"Rank":24,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":56.24,"ArguAna":62.65,"ClimateFEVER":44.0,"CQADupstackRetrieval":40.64,"DBPedia":48.04,"FEVER":93.35,"FiQA2018":55.31,"HotpotQA":72.25,"MSMARCO":41.68,"NFCorpus":38.25,"NQ":61.79,"QuoraRetrieval":89.61,"SCIDOCS":27.69,"SciFact":75.31,"Touche2020":20.3,"TRECCOVID":72.72} -{"level_0":24,"index":64,"Rank":25,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":55.99,"ArguAna":57.48,"ClimateFEVER":35.19,"CQADupstackRetrieval":48.84,"DBPedia":49.58,"FEVER":89.4,"FiQA2018":53.11,"HotpotQA":74.07,"MSMARCO":42.17,"NFCorpus":39.33,"NQ":61.7,"QuoraRetrieval":87.75,"SCIDOCS":22.5,"SciFact":78.86,"Touche2020":22.18,"TRECCOVID":77.69} -{"level_0":25,"index":99,"Rank":26,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":55.98,"ArguAna":59.09,"ClimateFEVER":39.33,"CQADupstackRetrieval":46.97,"DBPedia":45.97,"FEVER":88.22,"FiQA2018":44.71,"HotpotQA":75.18,"MSMARCO":41.68,"NFCorpus":37.65,"NQ":63.11,"QuoraRetrieval":87.41,"SCIDOCS":21.36,"SciFact":73.82,"Touche2020":34.52,"TRECCOVID":80.72} -{"level_0":26,"index":121,"Rank":27,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.98,"ArguAna":59.09,"ClimateFEVER":39.33,"CQADupstackRetrieval":46.97,"DBPedia":45.97,"FEVER":88.22,"FiQA2018":44.71,"HotpotQA":75.18,"MSMARCO":41.68,"NFCorpus":37.65,"NQ":63.11,"QuoraRetrieval":87.41,"SCIDOCS":21.36,"SciFact":73.82,"Touche2020":34.52,"TRECCOVID":80.72} -{"level_0":27,"index":1,"Rank":28,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":55.7,"ArguAna":62.18,"ClimateFEVER":33.21,"CQADupstackRetrieval":48.89,"DBPedia":47.12,"FEVER":86.96,"FiQA2018":59.24,"HotpotQA":71.33,"MSMARCO":32.58,"NFCorpus":40.33,"NQ":61.28,"QuoraRetrieval":88.18,"SCIDOCS":20.34,"SciFact":75.42,"Touche2020":25.86,"TRECCOVID":82.62} -{"level_0":28,"index":8,"Rank":29,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.58,"ArguAna":58.73,"ClimateFEVER":37.47,"CQADupstackRetrieval":45.11,"DBPedia":43.42,"FEVER":89.71,"FiQA2018":44.79,"HotpotQA":70.46,"MSMARCO":39.66,"NFCorpus":43.33,"NQ":60.65,"QuoraRetrieval":87.83,"SCIDOCS":23.19,"SciFact":73.64,"Touche2020":36.83,"TRECCOVID":78.92} -{"level_0":29,"index":178,"Rank":30,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.52,"ArguAna":58.52,"ClimateFEVER":34.56,"CQADupstackRetrieval":46.91,"DBPedia":46.83,"FEVER":91.22,"FiQA2018":54.51,"HotpotQA":76.41,"MSMARCO":43.25,"NFCorpus":39.55,"NQ":62.31,"QuoraRetrieval":89.34,"SCIDOCS":20.17,"SciFact":73.99,"Touche2020":18.52,"TRECCOVID":76.66} -{"level_0":30,"index":283,"Rank":31,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.44,"ArguAna":58.05,"ClimateFEVER":30.27,"CQADupstackRetrieval":47.54,"DBPedia":44.76,"FEVER":87.94,"FiQA2018":55.0,"HotpotQA":71.58,"MSMARCO":40.24,"NFCorpus":42.07,"NQ":61.27,"QuoraRetrieval":89.05,"SCIDOCS":23.11,"SciFact":77.77,"Touche2020":23.35,"TRECCOVID":79.56} -{"level_0":31,"index":43,"Rank":32,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.09,"ArguAna":59.49,"ClimateFEVER":28.69,"CQADupstackRetrieval":47.63,"DBPedia":46.54,"FEVER":85.02,"FiQA2018":49.89,"HotpotQA":73.83,"MSMARCO":35.55,"NFCorpus":39.05,"NQ":63.87,"QuoraRetrieval":87.7,"SCIDOCS":23.06,"SciFact":77.02,"Touche2020":27.97,"TRECCOVID":81.07} -{"level_0":32,"index":34,"Rank":33,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.0,"ArguAna":61.52,"ClimateFEVER":38.43,"CQADupstackRetrieval":41.53,"DBPedia":43.36,"FEVER":88.97,"FiQA2018":42.19,"HotpotQA":70.72,"MSMARCO":42.93,"NFCorpus":38.57,"NQ":61.56,"QuoraRetrieval":88.72,"SCIDOCS":20.31,"SciFact":71.83,"Touche2020":32.42,"TRECCOVID":81.92} -{"level_0":33,"index":100,"Rank":34,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":54.91,"ArguAna":56.44,"ClimateFEVER":39.37,"CQADupstackRetrieval":43.81,"DBPedia":44.73,"FEVER":89.02,"FiQA2018":42.4,"HotpotQA":73.65,"MSMARCO":41.77,"NFCorpus":36.77,"NQ":62.43,"QuoraRetrieval":87.42,"SCIDOCS":21.1,"SciFact":73.55,"Touche2020":31.47,"TRECCOVID":79.65} -{"level_0":34,"index":101,"Rank":35,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.83,"ArguAna":60.36,"ClimateFEVER":38.33,"CQADupstackRetrieval":44.21,"DBPedia":45.29,"FEVER":86.89,"FiQA2018":42.16,"HotpotQA":72.76,"MSMARCO":41.97,"NFCorpus":35.87,"NQ":62.34,"QuoraRetrieval":87.72,"SCIDOCS":20.47,"SciFact":69.96,"Touche2020":32.35,"TRECCOVID":81.7} -{"level_0":35,"index":108,"Rank":36,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.66,"ArguAna":66.15,"ClimateFEVER":38.53,"CQADupstackRetrieval":40.93,"DBPedia":44.89,"FEVER":88.24,"FiQA2018":44.84,"HotpotQA":73.13,"MSMARCO":41.4,"NFCorpus":38.65,"NQ":55.86,"QuoraRetrieval":89.02,"SCIDOCS":22.98,"SciFact":74.07,"Touche2020":24.93,"TRECCOVID":76.33} -{"level_0":36,"index":60,"Rank":37,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":54.6,"ArguAna":56.53,"ClimateFEVER":30.7,"CQADupstackRetrieval":45.94,"DBPedia":48.42,"FEVER":89.93,"FiQA2018":51.28,"HotpotQA":72.99,"MSMARCO":41.46,"NFCorpus":40.33,"NQ":61.24,"QuoraRetrieval":85.59,"SCIDOCS":21.05,"SciFact":77.3,"Touche2020":16.92,"TRECCOVID":79.25} -{"level_0":37,"index":194,"Rank":38,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.39,"ArguAna":66.02,"ClimateFEVER":36.09,"CQADupstackRetrieval":41.6,"DBPedia":44.51,"FEVER":86.91,"FiQA2018":45.27,"HotpotQA":72.03,"MSMARCO":41.26,"NFCorpus":38.64,"NQ":55.79,"QuoraRetrieval":88.98,"SCIDOCS":23.32,"SciFact":74.73,"Touche2020":25.2,"TRECCOVID":75.57} -{"level_0":38,"index":22,"Rank":39,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":54.29,"ArguAna":63.54,"ClimateFEVER":36.57,"CQADupstackRetrieval":42.23,"DBPedia":44.11,"FEVER":87.18,"FiQA2018":45.02,"HotpotQA":74.1,"MSMARCO":42.49,"NFCorpus":38.13,"NQ":55.03,"QuoraRetrieval":89.07,"SCIDOCS":22.64,"SciFact":74.61,"Touche2020":24.81,"TRECCOVID":74.82} -{"level_0":39,"index":18,"Rank":40,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.09,"ArguAna":63.49,"ClimateFEVER":40.36,"CQADupstackRetrieval":39.52,"DBPedia":39.9,"FEVER":94.81,"FiQA2018":48.65,"HotpotQA":67.75,"MSMARCO":42.62,"NFCorpus":35.88,"NQ":52.96,"QuoraRetrieval":88.42,"SCIDOCS":21.92,"SciFact":76.77,"Touche2020":25.22,"TRECCOVID":73.13} -{"level_0":40,"index":36,"Rank":41,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.84,"ArguAna":55.11,"ClimateFEVER":29.96,"CQADupstackRetrieval":40.64,"DBPedia":41.0,"FEVER":88.53,"FiQA2018":44.1,"HotpotQA":70.61,"MSMARCO":43.45,"NFCorpus":36.42,"NQ":63.41,"QuoraRetrieval":88.92,"SCIDOCS":19.34,"SciFact":70.05,"Touche2020":32.7,"TRECCOVID":83.37} -{"level_0":41,"index":53,"Rank":42,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":53.46,"ArguAna":66.18,"ClimateFEVER":33.13,"CQADupstackRetrieval":42.74,"DBPedia":42.84,"FEVER":85.44,"FiQA2018":44.93,"HotpotQA":72.49,"MSMARCO":41.46,"NFCorpus":37.9,"NQ":55.12,"QuoraRetrieval":89.07,"SCIDOCS":23.69,"SciFact":75.14,"Touche2020":24.42,"TRECCOVID":67.33} -{"level_0":42,"index":117,"Rank":43,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.44,"ArguAna":63.38,"ClimateFEVER":33.99,"CQADupstackRetrieval":43.44,"DBPedia":42.96,"FEVER":86.55,"FiQA2018":44.3,"HotpotQA":70.46,"MSMARCO":41.39,"NFCorpus":38.65,"NQ":56.09,"QuoraRetrieval":88.98,"SCIDOCS":24.06,"SciFact":74.72,"Touche2020":23.45,"TRECCOVID":69.13} -{"level_0":43,"index":261,"Rank":44,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.3,"ArguAna":65.99,"ClimateFEVER":30.37,"CQADupstackRetrieval":43.39,"DBPedia":43.31,"FEVER":83.58,"FiQA2018":45.01,"HotpotQA":71.26,"MSMARCO":41.63,"NFCorpus":39.13,"NQ":55.13,"QuoraRetrieval":89.12,"SCIDOCS":23.79,"SciFact":75.25,"Touche2020":23.11,"TRECCOVID":69.38} -{"level_0":44,"index":197,"Rank":45,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.3,"ArguAna":65.99,"ClimateFEVER":30.37,"CQADupstackRetrieval":43.39,"DBPedia":43.31,"FEVER":83.58,"FiQA2018":45.01,"HotpotQA":71.26,"MSMARCO":41.63,"NFCorpus":39.13,"NQ":55.13,"QuoraRetrieval":89.12,"SCIDOCS":23.79,"SciFact":75.25,"Touche2020":23.11,"TRECCOVID":69.38} -{"level_0":45,"index":20,"Rank":46,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"level_0":46,"index":179,"Rank":47,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"level_0":47,"index":181,"Rank":48,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"level_0":48,"index":180,"Rank":49,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"level_0":49,"index":182,"Rank":50,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":53.25,"ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":42.35,"DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} -{"level_0":50,"index":213,"Rank":51,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.01,"ArguAna":48.01,"ClimateFEVER":41.28,"CQADupstackRetrieval":39.61,"DBPedia":43.9,"FEVER":86.34,"FiQA2018":37.46,"HotpotQA":72.62,"MSMARCO":42.53,"NFCorpus":34.67,"NQ":59.72,"QuoraRetrieval":88.0,"SCIDOCS":18.62,"SciFact":70.28,"Touche2020":29.86,"TRECCOVID":82.3} -{"level_0":51,"index":29,"Rank":52,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"level_0":52,"index":26,"Rank":53,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"level_0":53,"index":27,"Rank":54,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"level_0":54,"index":129,"Rank":55,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"level_0":55,"index":28,"Rank":56,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"level_0":56,"index":206,"Rank":57,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":52.81,"ArguAna":49.26,"ClimateFEVER":40.5,"CQADupstackRetrieval":38.26,"DBPedia":45.03,"FEVER":84.98,"FiQA2018":38.44,"HotpotQA":73.64,"MSMARCO":43.13,"NFCorpus":35.03,"NQ":59.38,"QuoraRetrieval":87.7,"SCIDOCS":18.29,"SciFact":70.5,"Touche2020":28.15,"TRECCOVID":79.92} -{"level_0":57,"index":161,"Rank":58,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":52.47,"ArguAna":58.38,"ClimateFEVER":29.86,"CQADupstackRetrieval":42.71,"DBPedia":38.36,"FEVER":77.99,"FiQA2018":47.71,"HotpotQA":69.32,"MSMARCO":40.43,"NFCorpus":35.53,"NQ":57.75,"QuoraRetrieval":89.15,"SCIDOCS":18.72,"SciFact":71.85,"Touche2020":27.25,"TRECCOVID":82.0} -{"level_0":58,"index":0,"Rank":59,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":52.44,"ArguAna":56.27,"ClimateFEVER":29.35,"CQADupstackRetrieval":45.41,"DBPedia":41.91,"FEVER":82.61,"FiQA2018":55.54,"HotpotQA":64.65,"MSMARCO":31.12,"NFCorpus":37.81,"NQ":57.37,"QuoraRetrieval":87.89,"SCIDOCS":18.21,"SciFact":70.86,"Touche2020":27.4,"TRECCOVID":80.13} -{"level_0":59,"index":211,"Rank":60,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":52.4,"ArguAna":47.45,"ClimateFEVER":40.7,"CQADupstackRetrieval":39.06,"DBPedia":42.96,"FEVER":85.7,"FiQA2018":36.92,"HotpotQA":71.48,"MSMARCO":42.29,"NFCorpus":33.31,"NQ":58.83,"QuoraRetrieval":87.87,"SCIDOCS":17.88,"SciFact":70.12,"Touche2020":29.24,"TRECCOVID":82.12} -{"level_0":60,"index":115,"Rank":61,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.31,"ArguAna":62.62,"ClimateFEVER":31.49,"CQADupstackRetrieval":43.2,"DBPedia":41.7,"FEVER":86.65,"FiQA2018":40.64,"HotpotQA":68.92,"MSMARCO":40.64,"NFCorpus":37.64,"NQ":53.43,"QuoraRetrieval":88.81,"SCIDOCS":23.47,"SciFact":75.29,"Touche2020":20.58,"TRECCOVID":69.6} -{"level_0":61,"index":253,"Rank":62,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.22,"ArguAna":57.16,"ClimateFEVER":28.82,"CQADupstackRetrieval":43.18,"DBPedia":42.37,"FEVER":84.53,"FiQA2018":44.5,"HotpotQA":67.16,"MSMARCO":40.86,"NFCorpus":38.17,"NQ":54.78,"QuoraRetrieval":88.32,"SCIDOCS":23.44,"SciFact":74.27,"Touche2020":25.51,"TRECCOVID":70.22} -{"level_0":62,"index":125,"Rank":63,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.99,"ArguAna":57.59,"ClimateFEVER":35.2,"CQADupstackRetrieval":39.65,"DBPedia":41.02,"FEVER":87.13,"FiQA2018":40.65,"HotpotQA":66.54,"MSMARCO":40.23,"NFCorpus":34.92,"NQ":50.9,"QuoraRetrieval":88.41,"SCIDOCS":21.82,"SciFact":72.22,"Touche2020":23.48,"TRECCOVID":80.12} -{"level_0":63,"index":119,"Rank":64,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.99,"ArguAna":57.59,"ClimateFEVER":35.2,"CQADupstackRetrieval":39.65,"DBPedia":41.02,"FEVER":87.13,"FiQA2018":40.65,"HotpotQA":66.54,"MSMARCO":40.23,"NFCorpus":34.92,"NQ":50.9,"QuoraRetrieval":88.41,"SCIDOCS":21.82,"SciFact":72.22,"Touche2020":23.48,"TRECCOVID":80.12} -{"level_0":64,"index":103,"Rank":65,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.98,"ArguAna":56.87,"ClimateFEVER":31.25,"CQADupstackRetrieval":42.02,"DBPedia":41.59,"FEVER":82.49,"FiQA2018":39.68,"HotpotQA":66.59,"MSMARCO":39.79,"NFCorpus":32.54,"NQ":56.19,"QuoraRetrieval":87.47,"SCIDOCS":19.42,"SciFact":69.92,"Touche2020":32.51,"TRECCOVID":81.39} -{"level_0":65,"index":186,"Rank":66,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":51.92,"ArguAna":64.56,"ClimateFEVER":27.29,"CQADupstackRetrieval":42.39,"DBPedia":41.79,"FEVER":83.69,"FiQA2018":44.3,"HotpotQA":74.33,"MSMARCO":42.03,"NFCorpus":36.91,"NQ":51.77,"QuoraRetrieval":89.09,"SCIDOCS":19.58,"SciFact":73.42,"Touche2020":23.54,"TRECCOVID":64.14} -{"level_0":66,"index":170,"Rank":67,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.8,"ArguAna":64.07,"ClimateFEVER":28.74,"CQADupstackRetrieval":41.14,"DBPedia":42.51,"FEVER":81.38,"FiQA2018":42.64,"HotpotQA":72.37,"MSMARCO":40.66,"NFCorpus":38.43,"NQ":51.55,"QuoraRetrieval":88.74,"SCIDOCS":20.16,"SciFact":75.41,"Touche2020":22.6,"TRECCOVID":66.57} -{"level_0":67,"index":24,"Rank":68,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":51.68,"ArguAna":59.55,"ClimateFEVER":31.84,"CQADupstackRetrieval":39.05,"DBPedia":40.03,"FEVER":86.64,"FiQA2018":40.34,"HotpotQA":69.94,"MSMARCO":40.83,"NFCorpus":34.3,"NQ":50.18,"QuoraRetrieval":88.78,"SCIDOCS":20.52,"SciFact":71.28,"Touche2020":26.04,"TRECCOVID":75.9} -{"level_0":68,"index":282,"Rank":69,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.66,"ArguAna":55.6,"ClimateFEVER":25.8,"CQADupstackRetrieval":42.28,"DBPedia":40.8,"FEVER":84.57,"FiQA2018":50.33,"HotpotQA":62.69,"MSMARCO":37.93,"NFCorpus":37.94,"NQ":56.64,"QuoraRetrieval":88.22,"SCIDOCS":20.44,"SciFact":73.1,"Touche2020":22.31,"TRECCOVID":76.24} -{"level_0":69,"index":66,"Rank":70,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":51.44,"ArguAna":51.66,"ClimateFEVER":33.49,"CQADupstackRetrieval":41.73,"DBPedia":43.58,"FEVER":86.81,"FiQA2018":41.0,"HotpotQA":63.85,"MSMARCO":38.32,"NFCorpus":37.12,"NQ":53.89,"QuoraRetrieval":87.37,"SCIDOCS":17.96,"SciFact":72.08,"Touche2020":22.31,"TRECCOVID":80.41} -{"level_0":70,"index":207,"Rank":71,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.43,"ArguAna":54.66,"ClimateFEVER":27.01,"CQADupstackRetrieval":38.47,"DBPedia":42.07,"FEVER":77.32,"FiQA2018":39.02,"HotpotQA":64.12,"MSMARCO":43.33,"NFCorpus":35.31,"NQ":61.43,"QuoraRetrieval":88.33,"SCIDOCS":18.7,"SciFact":71.51,"Touche2020":27.86,"TRECCOVID":82.31} -{"level_0":71,"index":160,"Rank":72,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":51.43,"ArguAna":54.38,"ClimateFEVER":25.73,"CQADupstackRetrieval":39.68,"DBPedia":41.29,"FEVER":82.81,"FiQA2018":43.8,"HotpotQA":71.23,"MSMARCO":43.7,"NFCorpus":33.99,"NQ":64.06,"QuoraRetrieval":88.18,"SCIDOCS":17.47,"SciFact":70.41,"Touche2020":23.39,"TRECCOVID":71.33} -{"level_0":72,"index":193,"Rank":73,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.42,"ArguAna":63.63,"ClimateFEVER":30.73,"CQADupstackRetrieval":39.46,"DBPedia":40.74,"FEVER":82.24,"FiQA2018":41.75,"HotpotQA":63.0,"MSMARCO":38.03,"NFCorpus":37.35,"NQ":54.84,"QuoraRetrieval":88.14,"SCIDOCS":22.78,"SciFact":74.12,"Touche2020":25.89,"TRECCOVID":68.64} -{"level_0":73,"index":33,"Rank":74,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":51.34,"ArguAna":50.81,"ClimateFEVER":28.9,"CQADupstackRetrieval":38.89,"DBPedia":41.03,"FEVER":87.64,"FiQA2018":38.83,"HotpotQA":66.79,"MSMARCO":41.33,"NFCorpus":33.65,"NQ":57.99,"QuoraRetrieval":88.11,"SCIDOCS":18.1,"SciFact":66.69,"Touche2020":31.93,"TRECCOVID":79.36} -{"level_0":74,"index":112,"Rank":75,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.31,"ArguAna":51.31,"ClimateFEVER":22.76,"CQADupstackRetrieval":45.27,"DBPedia":36.95,"FEVER":88.17,"FiQA2018":44.8,"HotpotQA":64.15,"MSMARCO":38.0,"NFCorpus":33.94,"NQ":55.1,"QuoraRetrieval":88.41,"SCIDOCS":21.98,"SciFact":70.52,"Touche2020":27.48,"TRECCOVID":80.75} -{"level_0":75,"index":252,"Rank":76,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.14,"ArguAna":57.12,"ClimateFEVER":28.1,"CQADupstackRetrieval":42.91,"DBPedia":41.19,"FEVER":81.52,"FiQA2018":40.76,"HotpotQA":65.75,"MSMARCO":40.21,"NFCorpus":37.9,"NQ":52.84,"QuoraRetrieval":88.15,"SCIDOCS":23.13,"SciFact":76.18,"Touche2020":22.55,"TRECCOVID":68.78} -{"level_0":76,"index":284,"Rank":77,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.08,"ArguAna":55.49,"ClimateFEVER":26.86,"CQADupstackRetrieval":42.58,"DBPedia":39.97,"FEVER":79.42,"FiQA2018":44.91,"HotpotQA":63.63,"MSMARCO":37.02,"NFCorpus":38.33,"NQ":52.86,"QuoraRetrieval":88.83,"SCIDOCS":20.8,"SciFact":73.37,"Touche2020":24.28,"TRECCOVID":77.9} -{"level_0":77,"index":210,"Rank":78,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":50.81,"ArguAna":45.44,"ClimateFEVER":39.63,"CQADupstackRetrieval":37.61,"DBPedia":39.42,"FEVER":84.4,"FiQA2018":35.0,"HotpotQA":67.78,"MSMARCO":41.38,"NFCorpus":32.54,"NQ":57.1,"QuoraRetrieval":87.65,"SCIDOCS":16.76,"SciFact":68.24,"Touche2020":28.49,"TRECCOVID":80.65} -{"level_0":78,"index":250,"Rank":79,"Model":"ret-phi2-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.78,"ArguAna":48.27,"ClimateFEVER":29.61,"CQADupstackRetrieval":35.76,"DBPedia":40.51,"FEVER":80.14,"FiQA2018":41.08,"HotpotQA":63.19,"MSMARCO":42.72,"NFCorpus":37.82,"NQ":56.39,"QuoraRetrieval":88.1,"SCIDOCS":17.48,"SciFact":71.63,"Touche2020":25.66,"TRECCOVID":83.4} -{"level_0":79,"index":155,"Rank":80,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.56,"ArguAna":46.42,"ClimateFEVER":22.21,"CQADupstackRetrieval":37.89,"DBPedia":44.02,"FEVER":82.83,"FiQA2018":41.14,"HotpotQA":73.13,"MSMARCO":43.46,"NFCorpus":37.13,"NQ":63.44,"QuoraRetrieval":86.84,"SCIDOCS":20.51,"SciFact":72.24,"Touche2020":20.67,"TRECCOVID":66.54} -{"level_0":80,"index":118,"Rank":81,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.43,"ArguAna":59.12,"ClimateFEVER":31.83,"CQADupstackRetrieval":39.89,"DBPedia":39.76,"FEVER":86.92,"FiQA2018":39.15,"HotpotQA":65.2,"MSMARCO":39.73,"NFCorpus":34.69,"NQ":48.69,"QuoraRetrieval":88.43,"SCIDOCS":21.89,"SciFact":70.86,"Touche2020":21.19,"TRECCOVID":69.14} -{"level_0":81,"index":140,"Rank":82,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":50.29,"ArguAna":44.49,"ClimateFEVER":26.56,"CQADupstackRetrieval":38.54,"DBPedia":42.23,"FEVER":84.99,"FiQA2018":39.88,"HotpotQA":69.15,"MSMARCO":41.77,"NFCorpus":35.39,"NQ":58.22,"QuoraRetrieval":86.56,"SCIDOCS":18.69,"SciFact":71.94,"Touche2020":26.4,"TRECCOVID":69.6} -{"level_0":82,"index":153,"Rank":83,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.29,"ArguAna":44.49,"ClimateFEVER":26.56,"CQADupstackRetrieval":38.54,"DBPedia":42.23,"FEVER":84.99,"FiQA2018":39.88,"HotpotQA":69.15,"MSMARCO":41.77,"NFCorpus":35.39,"NQ":58.22,"QuoraRetrieval":86.56,"SCIDOCS":18.69,"SciFact":71.94,"Touche2020":26.4,"TRECCOVID":69.6} -{"level_0":83,"index":83,"Rank":84,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.25,"ArguAna":51.38,"ClimateFEVER":30.46,"CQADupstackRetrieval":39.4,"DBPedia":39.87,"FEVER":78.24,"FiQA2018":37.2,"HotpotQA":59.26,"MSMARCO":39.91,"NFCorpus":36.21,"NQ":52.41,"QuoraRetrieval":84.58,"SCIDOCS":19.87,"SciFact":74.7,"Touche2020":25.43,"TRECCOVID":84.88} -{"level_0":84,"index":35,"Rank":85,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.15,"ArguAna":53.37,"ClimateFEVER":24.8,"CQADupstackRetrieval":38.09,"DBPedia":38.05,"FEVER":85.52,"FiQA2018":36.16,"HotpotQA":66.88,"MSMARCO":40.49,"NFCorpus":32.12,"NQ":55.51,"QuoraRetrieval":87.85,"SCIDOCS":17.1,"SciFact":67.95,"Touche2020":29.48,"TRECCOVID":78.93} -{"level_0":85,"index":104,"Rank":86,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.15,"ArguAna":52.08,"ClimateFEVER":29.88,"CQADupstackRetrieval":40.12,"DBPedia":40.2,"FEVER":83.4,"FiQA2018":34.52,"HotpotQA":65.25,"MSMARCO":39.43,"NFCorpus":30.89,"NQ":54.76,"QuoraRetrieval":86.57,"SCIDOCS":18.36,"SciFact":64.51,"Touche2020":32.79,"TRECCOVID":79.43} -{"level_0":86,"index":151,"Rank":87,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.1,"ArguAna":60.63,"ClimateFEVER":29.0,"CQADupstackRetrieval":41.14,"DBPedia":39.64,"FEVER":79.13,"FiQA2018":38.62,"HotpotQA":68.22,"MSMARCO":40.95,"NFCorpus":37.51,"NQ":50.2,"QuoraRetrieval":88.72,"SCIDOCS":18.58,"SciFact":72.51,"Touche2020":21.9,"TRECCOVID":64.79} -{"level_0":87,"index":285,"Rank":88,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.02,"ArguAna":55.98,"ClimateFEVER":27.08,"CQADupstackRetrieval":34.27,"DBPedia":42.7,"FEVER":78.55,"FiQA2018":41.57,"HotpotQA":67.01,"MSMARCO":38.9,"NFCorpus":36.66,"NQ":55.84,"QuoraRetrieval":84.69,"SCIDOCS":16.24,"SciFact":71.8,"Touche2020":26.27,"TRECCOVID":72.72} -{"level_0":88,"index":154,"Rank":89,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":49.99,"ArguAna":49.35,"ClimateFEVER":22.4,"CQADupstackRetrieval":39.44,"DBPedia":42.39,"FEVER":65.03,"FiQA2018":38.56,"HotpotQA":63.33,"MSMARCO":44.05,"NFCorpus":36.07,"NQ":62.86,"QuoraRetrieval":88.18,"SCIDOCS":20.12,"SciFact":72.58,"Touche2020":27.21,"TRECCOVID":78.32} -{"level_0":89,"index":254,"Rank":90,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":49.46,"ArguAna":55.44,"ClimateFEVER":26.54,"CQADupstackRetrieval":39.98,"DBPedia":39.1,"FEVER":81.55,"FiQA2018":39.35,"HotpotQA":63.79,"MSMARCO":40.31,"NFCorpus":34.77,"NQ":50.29,"QuoraRetrieval":88.02,"SCIDOCS":21.38,"SciFact":72.7,"Touche2020":22.22,"TRECCOVID":66.53} -{"level_0":90,"index":169,"Rank":91,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.34,"ArguAna":52.21,"ClimateFEVER":26.79,"CQADupstackRetrieval":39.29,"DBPedia":37.62,"FEVER":73.98,"FiQA2018":36.7,"HotpotQA":56.66,"MSMARCO":42.02,"NFCorpus":36.02,"NQ":53.31,"QuoraRetrieval":88.28,"SCIDOCS":18.94,"SciFact":69.29,"Touche2020":25.19,"TRECCOVID":83.82} -{"level_0":91,"index":149,"Rank":92,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":49.26,"ArguAna":55.65,"ClimateFEVER":26.54,"CQADupstackRetrieval":43.09,"DBPedia":40.24,"FEVER":70.03,"FiQA2018":46.96,"HotpotQA":55.88,"MSMARCO":41.61,"NFCorpus":36.0,"NQ":57.24,"QuoraRetrieval":88.85,"SCIDOCS":17.36,"SciFact":64.56,"Touche2020":23.44,"TRECCOVID":71.4} -{"level_0":92,"index":281,"Rank":93,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.25,"ArguAna":57.44,"ClimateFEVER":21.64,"CQADupstackRetrieval":41.69,"DBPedia":39.39,"FEVER":74.99,"FiQA2018":44.41,"HotpotQA":60.9,"MSMARCO":40.91,"NFCorpus":36.97,"NQ":51.58,"QuoraRetrieval":87.6,"SCIDOCS":18.36,"SciFact":72.75,"Touche2020":21.61,"TRECCOVID":68.47} -{"level_0":93,"index":158,"Rank":94,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.04,"ArguAna":41.67,"ClimateFEVER":22.87,"CQADupstackRetrieval":37.07,"DBPedia":41.32,"FEVER":81.64,"FiQA2018":37.43,"HotpotQA":66.61,"MSMARCO":41.46,"NFCorpus":32.45,"NQ":59.11,"QuoraRetrieval":85.71,"SCIDOCS":17.77,"SciFact":68.85,"Touche2020":27.12,"TRECCOVID":74.53} -{"level_0":94,"index":159,"Rank":95,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":48.88,"ArguAna":44.23,"ClimateFEVER":23.86,"CQADupstackRetrieval":38.52,"DBPedia":40.36,"FEVER":79.44,"FiQA2018":38.17,"HotpotQA":68.56,"MSMARCO":42.27,"NFCorpus":32.46,"NQ":60.02,"QuoraRetrieval":87.65,"SCIDOCS":17.16,"SciFact":69.35,"Touche2020":21.35,"TRECCOVID":69.76} -{"level_0":95,"index":152,"Rank":96,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.75,"ArguAna":51.41,"ClimateFEVER":15.38,"CQADupstackRetrieval":38.92,"DBPedia":41.02,"FEVER":58.24,"FiQA2018":36.37,"HotpotQA":62.21,"MSMARCO":43.14,"NFCorpus":36.59,"NQ":59.97,"QuoraRetrieval":87.92,"SCIDOCS":18.99,"SciFact":73.08,"Touche2020":28.31,"TRECCOVID":79.64} -{"level_0":96,"index":238,"Rank":97,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":48.48,"ArguAna":53.77,"ClimateFEVER":27.21,"CQADupstackRetrieval":38.56,"DBPedia":41.28,"FEVER":74.08,"FiQA2018":46.78,"HotpotQA":59.67,"MSMARCO":44.05,"NFCorpus":34.18,"NQ":57.24,"QuoraRetrieval":89.09,"SCIDOCS":15.88,"SciFact":66.77,"Touche2020":26.76,"TRECCOVID":51.9} -{"level_0":97,"index":123,"Rank":98,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":48.22,"ArguAna":47.28,"ClimateFEVER":29.39,"CQADupstackRetrieval":39.62,"DBPedia":39.03,"FEVER":73.97,"FiQA2018":35.84,"HotpotQA":57.26,"MSMARCO":41.12,"NFCorpus":35.78,"NQ":53.15,"QuoraRetrieval":74.71,"SCIDOCS":18.62,"SciFact":72.11,"Touche2020":23.98,"TRECCOVID":81.37} -{"level_0":98,"index":208,"Rank":99,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.0,"ArguAna":54.81,"ClimateFEVER":24.71,"CQADupstackRetrieval":41.4,"DBPedia":40.2,"FEVER":74.39,"FiQA2018":39.86,"HotpotQA":63.7,"MSMARCO":34.99,"NFCorpus":35.68,"NQ":48.55,"QuoraRetrieval":88.19,"SCIDOCS":20.17,"SciFact":71.98,"Touche2020":19.17,"TRECCOVID":62.2} -{"level_0":99,"index":237,"Rank":100,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":47.96,"ArguAna":52.81,"ClimateFEVER":27.01,"CQADupstackRetrieval":37.35,"DBPedia":39.74,"FEVER":72.18,"FiQA2018":44.19,"HotpotQA":58.91,"MSMARCO":43.52,"NFCorpus":33.34,"NQ":56.16,"QuoraRetrieval":88.91,"SCIDOCS":15.71,"SciFact":64.2,"Touche2020":25.26,"TRECCOVID":60.09} -{"level_0":100,"index":175,"Rank":101,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":47.87,"ArguAna":44.18,"ClimateFEVER":23.53,"CQADupstackRetrieval":39.34,"DBPedia":35.05,"FEVER":72.33,"FiQA2018":41.58,"HotpotQA":61.38,"MSMARCO":40.92,"NFCorpus":32.45,"NQ":60.44,"QuoraRetrieval":88.2,"SCIDOCS":19.86,"SciFact":66.68,"Touche2020":26.24,"TRECCOVID":65.91} -{"level_0":101,"index":209,"Rank":102,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":47.75,"ArguAna":43.4,"ClimateFEVER":36.52,"CQADupstackRetrieval":34.67,"DBPedia":36.22,"FEVER":80.48,"FiQA2018":32.08,"HotpotQA":60.09,"MSMARCO":39.99,"NFCorpus":30.72,"NQ":53.62,"QuoraRetrieval":87.07,"SCIDOCS":15.56,"SciFact":64.28,"Touche2020":26.99,"TRECCOVID":74.58} -{"level_0":102,"index":167,"Rank":103,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.67,"ArguAna":48.33,"ClimateFEVER":23.36,"CQADupstackRetrieval":37.82,"DBPedia":34.54,"FEVER":71.96,"FiQA2018":35.12,"HotpotQA":55.12,"MSMARCO":40.25,"NFCorpus":33.66,"NQ":50.62,"QuoraRetrieval":88.01,"SCIDOCS":18.5,"SciFact":69.43,"Touche2020":25.17,"TRECCOVID":83.21} -{"level_0":103,"index":148,"Rank":104,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":47.57,"ArguAna":57.05,"ClimateFEVER":27.74,"CQADupstackRetrieval":43.82,"DBPedia":36.68,"FEVER":72.69,"FiQA2018":45.45,"HotpotQA":55.18,"MSMARCO":39.65,"NFCorpus":34.09,"NQ":50.1,"QuoraRetrieval":88.44,"SCIDOCS":18.55,"SciFact":64.43,"Touche2020":21.56,"TRECCOVID":58.06} -{"level_0":104,"index":236,"Rank":105,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":47.42,"ArguAna":52.09,"ClimateFEVER":26.9,"CQADupstackRetrieval":36.62,"DBPedia":39.55,"FEVER":72.66,"FiQA2018":42.79,"HotpotQA":57.85,"MSMARCO":42.73,"NFCorpus":32.63,"NQ":55.09,"QuoraRetrieval":88.47,"SCIDOCS":15.51,"SciFact":63.42,"Touche2020":28.29,"TRECCOVID":56.68} -{"level_0":105,"index":162,"Rank":106,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":46.64,"ArguAna":39.06,"ClimateFEVER":22.55,"CQADupstackRetrieval":36.07,"DBPedia":37.76,"FEVER":75.27,"FiQA2018":33.31,"HotpotQA":65.09,"MSMARCO":40.99,"NFCorpus":31.01,"NQ":56.29,"QuoraRetrieval":86.93,"SCIDOCS":13.89,"SciFact":67.7,"Touche2020":21.16,"TRECCOVID":72.57} -{"level_0":106,"index":82,"Rank":107,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.54,"ArguAna":50.49,"ClimateFEVER":27.11,"CQADupstackRetrieval":36.53,"DBPedia":34.7,"FEVER":72.73,"FiQA2018":33.29,"HotpotQA":52.84,"MSMARCO":38.83,"NFCorpus":33.89,"NQ":46.7,"QuoraRetrieval":85.6,"SCIDOCS":16.57,"SciFact":70.17,"Touche2020":23.44,"TRECCOVID":75.17} -{"level_0":107,"index":157,"Rank":108,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":46.01,"ArguAna":46.69,"ClimateFEVER":15.81,"CQADupstackRetrieval":36.08,"DBPedia":38.64,"FEVER":53.52,"FiQA2018":34.8,"HotpotQA":56.34,"MSMARCO":42.33,"NFCorpus":33.93,"NQ":58.73,"QuoraRetrieval":87.71,"SCIDOCS":16.42,"SciFact":65.6,"Touche2020":26.81,"TRECCOVID":76.78} -{"level_0":108,"index":166,"Rank":109,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.27,"ArguAna":46.67,"ClimateFEVER":25.56,"CQADupstackRetrieval":34.3,"DBPedia":32.61,"FEVER":67.22,"FiQA2018":31.29,"HotpotQA":51.67,"MSMARCO":38.27,"NFCorpus":30.7,"NQ":46.16,"QuoraRetrieval":87.01,"SCIDOCS":17.23,"SciFact":65.42,"Touche2020":24.92,"TRECCOVID":80.03} -{"level_0":109,"index":177,"Rank":110,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.14,"ArguAna":46.73,"ClimateFEVER":24.05,"CQADupstackRetrieval":38.03,"DBPedia":32.65,"FEVER":68.02,"FiQA2018":33.43,"HotpotQA":56.48,"MSMARCO":37.28,"NFCorpus":30.4,"NQ":51.59,"QuoraRetrieval":87.19,"SCIDOCS":18.61,"SciFact":63.89,"Touche2020":23.52,"TRECCOVID":65.18} -{"level_0":110,"index":116,"Rank":111,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.12,"ArguAna":51.25,"ClimateFEVER":25.16,"CQADupstackRetrieval":41.77,"DBPedia":34.83,"FEVER":73.17,"FiQA2018":37.85,"HotpotQA":52.59,"MSMARCO":36.54,"NFCorpus":31.34,"NQ":46.1,"QuoraRetrieval":88.07,"SCIDOCS":21.44,"SciFact":64.4,"Touche2020":20.79,"TRECCOVID":51.47} -{"level_0":111,"index":147,"Rank":112,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":45.12,"ArguAna":52.03,"ClimateFEVER":27.95,"CQADupstackRetrieval":40.95,"DBPedia":33.34,"FEVER":71.85,"FiQA2018":39.18,"HotpotQA":54.19,"MSMARCO":37.76,"NFCorpus":31.59,"NQ":45.88,"QuoraRetrieval":88.19,"SCIDOCS":17.09,"SciFact":57.83,"Touche2020":20.37,"TRECCOVID":58.55} -{"level_0":112,"index":185,"Rank":113,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.99,"ArguAna":39.21,"ClimateFEVER":25.02,"CQADupstackRetrieval":38.91,"DBPedia":38.79,"FEVER":78.0,"FiQA2018":45.02,"HotpotQA":57.14,"MSMARCO":36.51,"NFCorpus":31.57,"NQ":52.83,"QuoraRetrieval":87.79,"SCIDOCS":15.62,"SciFact":69.32,"Touche2020":13.87,"TRECCOVID":45.22} -{"level_0":113,"index":107,"Rank":114,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":44.92,"ArguAna":53.64,"ClimateFEVER":24.71,"CQADupstackRetrieval":37.8,"DBPedia":35.97,"FEVER":70.11,"FiQA2018":31.42,"HotpotQA":55.7,"MSMARCO":34.51,"NFCorpus":32.04,"NQ":43.03,"QuoraRetrieval":87.04,"SCIDOCS":19.07,"SciFact":67.51,"Touche2020":22.08,"TRECCOVID":59.21} -{"level_0":114,"index":172,"Rank":115,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.81,"ArguAna":46.48,"ClimateFEVER":21.21,"CQADupstackRetrieval":37.96,"DBPedia":34.13,"FEVER":71.9,"FiQA2018":37.27,"HotpotQA":54.95,"MSMARCO":40.34,"NFCorpus":32.24,"NQ":51.4,"QuoraRetrieval":88.09,"SCIDOCS":18.45,"SciFact":59.76,"Touche2020":20.73,"TRECCOVID":57.25} -{"level_0":115,"index":235,"Rank":116,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":44.67,"ArguAna":50.83,"ClimateFEVER":24.88,"CQADupstackRetrieval":34.55,"DBPedia":35.24,"FEVER":68.93,"FiQA2018":35.15,"HotpotQA":54.93,"MSMARCO":41.16,"NFCorpus":30.22,"NQ":50.47,"QuoraRetrieval":87.98,"SCIDOCS":14.0,"SciFact":59.74,"Touche2020":25.89,"TRECCOVID":56.05} -{"level_0":116,"index":79,"Rank":117,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.49,"ArguAna":49.68,"ClimateFEVER":26.6,"CQADupstackRetrieval":33.33,"DBPedia":31.51,"FEVER":68.12,"FiQA2018":29.99,"HotpotQA":49.93,"MSMARCO":36.05,"NFCorpus":32.08,"NQ":42.94,"QuoraRetrieval":85.28,"SCIDOCS":16.18,"SciFact":68.29,"Touche2020":24.45,"TRECCOVID":72.98} -{"level_0":117,"index":171,"Rank":118,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.03,"ArguAna":49.01,"ClimateFEVER":21.48,"CQADupstackRetrieval":37.48,"DBPedia":32.44,"FEVER":73.29,"FiQA2018":34.06,"HotpotQA":52.78,"MSMARCO":37.77,"NFCorpus":30.38,"NQ":47.88,"QuoraRetrieval":87.63,"SCIDOCS":17.63,"SciFact":59.39,"Touche2020":18.59,"TRECCOVID":60.57} -{"level_0":118,"index":69,"Rank":119,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.97,"ArguAna":55.78,"ClimateFEVER":21.23,"CQADupstackRetrieval":38.2,"DBPedia":33.88,"FEVER":63.97,"FiQA2018":30.71,"HotpotQA":54.21,"MSMARCO":33.61,"NFCorpus":32.04,"NQ":42.47,"QuoraRetrieval":86.03,"SCIDOCS":18.64,"SciFact":67.31,"Touche2020":21.12,"TRECCOVID":60.32} -{"level_0":119,"index":230,"Rank":120,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.81,"ArguAna":46.52,"ClimateFEVER":21.97,"CQADupstackRetrieval":44.96,"DBPedia":32.09,"FEVER":50.86,"FiQA2018":49.96,"HotpotQA":39.29,"MSMARCO":39.75,"NFCorpus":33.29,"NQ":50.45,"QuoraRetrieval":87.46,"SCIDOCS":23.76,"SciFact":65.57,"Touche2020":19.93,"TRECCOVID":51.33} -{"level_0":120,"index":184,"Rank":121,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.39,"ArguAna":40.77,"ClimateFEVER":21.84,"CQADupstackRetrieval":33.15,"DBPedia":37.47,"FEVER":61.77,"FiQA2018":39.21,"HotpotQA":55.84,"MSMARCO":35.36,"NFCorpus":31.57,"NQ":47.73,"QuoraRetrieval":87.96,"SCIDOCS":19.48,"SciFact":68.35,"Touche2020":15.22,"TRECCOVID":55.07} -{"level_0":121,"index":228,"Rank":122,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, 
fp32)":0.12,"Average":42.69,"ArguAna":47.13,"ClimateFEVER":21.57,"CQADupstackRetrieval":42.53,"DBPedia":33.35,"FEVER":55.9,"FiQA2018":37.27,"HotpotQA":44.59,"MSMARCO":39.03,"NFCorpus":32.25,"NQ":46.47,"QuoraRetrieval":87.75,"SCIDOCS":21.82,"SciFact":62.64,"Touche2020":17.22,"TRECCOVID":50.82} -{"level_0":122,"index":106,"Rank":123,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.56,"ArguAna":55.31,"ClimateFEVER":25.35,"CQADupstackRetrieval":35.07,"DBPedia":32.25,"FEVER":74.99,"FiQA2018":25.59,"HotpotQA":53.91,"MSMARCO":31.01,"NFCorpus":31.86,"NQ":34.94,"QuoraRetrieval":85.72,"SCIDOCS":17.69,"SciFact":66.27,"Touche2020":18.1,"TRECCOVID":50.38} -{"level_0":123,"index":246,"Rank":124,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":42.24,"ArguAna":39.85,"ClimateFEVER":14.63,"CQADupstackRetrieval":44.65,"DBPedia":39.19,"FEVER":51.2,"FiQA2018":46.68,"HotpotQA":42.14,"MSMARCO":27.67,"NFCorpus":35.08,"NQ":52.87,"QuoraRetrieval":85.96,"SCIDOCS":17.17,"SciFact":55.38,"Touche2020":21.65,"TRECCOVID":59.48} -{"level_0":124,"index":229,"Rank":125,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":41.95,"ArguAna":50.17,"ClimateFEVER":20.27,"CQADupstackRetrieval":41.32,"DBPedia":32.33,"FEVER":51.93,"FiQA2018":36.87,"HotpotQA":46.51,"MSMARCO":36.54,"NFCorpus":31.59,"NQ":43.87,"QuoraRetrieval":87.56,"SCIDOCS":21.64,"SciFact":64.51,"Touche2020":16.9,"TRECCOVID":47.25} -{"level_0":125,"index":214,"Rank":126,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.88,"ArguAna":48.32,"ClimateFEVER":24.79,"CQADupstackRetrieval":33.67,"DBPedia":38.1,"FEVER":59.29,"FiQA2018":27.42,"HotpotQA":56.81,"MSMARCO":36.77,"NFCorpus":31.32,"NQ":41.83,"QuoraRetrieval":86.72,"SCIDOCS":17.12,"SciFact":65.51,"Touche2020":15.79,"TRECCOVID":44.77} -{"level_0":126,"index":183,"Rank":127,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.41,"ArguAna":32.93,"ClimateFEVER":25.94,"CQADupstackRetrieval":33.12,"DBPedia":35.47,"FEVER":56.41,"FiQA2018":37.75,"HotpotQA":54.66,"MSMARCO":38.29,"NFCorpus":30.16,"NQ":47.61,"QuoraRetrieval":87.85,"SCIDOCS":10.59,"SciFact":66.23,"Touche2020":13.31,"TRECCOVID":50.81} -{"level_0":127,"index":168,"Rank":128,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.19,"ArguAna":44.55,"ClimateFEVER":21.02,"CQADupstackRetrieval":31.43,"DBPedia":28.22,"FEVER":61.1,"FiQA2018":24.49,"HotpotQA":46.05,"MSMARCO":34.18,"NFCorpus":27.34,"NQ":40.05,"QuoraRetrieval":86.04,"SCIDOCS":15.31,"SciFact":61.74,"Touche2020":23.35,"TRECCOVID":72.99} -{"level_0":128,"index":134,"Rank":129,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":41.17,"ArguAna":49.11,"ClimateFEVER":23.29,"CQADupstackRetrieval":39.04,"DBPedia":32.04,"FEVER":52.63,"FiQA2018":36.35,"HotpotQA":45.66,"MSMARCO":36.83,"NFCorpus":29.67,"NQ":44.48,"QuoraRetrieval":87.21,"SCIDOCS":15.78,"SciFact":54.03,"Touche2020":15.61,"TRECCOVID":55.85} -{"level_0":129,"index":212,"Rank":130,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":40.92,"ArguAna":37.16,"ClimateFEVER":31.48,"CQADupstackRetrieval":28.72,"DBPedia":28.19,"FEVER":70.24,"FiQA2018":25.78,"HotpotQA":43.07,"MSMARCO":35.95,"NFCorpus":26.03,"NQ":45.54,"QuoraRetrieval":85.83,"SCIDOCS":12.09,"SciFact":52.71,"Touche2020":23.13,"TRECCOVID":67.83} -{"level_0":130,"index":105,"Rank":131,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.82,"ArguAna":51.51,"ClimateFEVER":24.77,"CQADupstackRetrieval":33.64,"DBPedia":31.06,"FEVER":69.64,"FiQA2018":24.37,"HotpotQA":49.73,"MSMARCO":27.85,"NFCorpus":30.04,"NQ":30.73,"QuoraRetrieval":85.29,"SCIDOCS":17.26,"SciFact":65.41,"Touche2020":18.34,"TRECCOVID":52.62} -{"level_0":131,"index":12,"Rank":132,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.84,"ArguAna":49.28,"ClimateFEVER":13.62,"CQADupstackRetrieval":31.86,"DBPedia":29.91,"FEVER":48.09,"FiQA2018":25.14,"HotpotQA":56.91,"MSMARCO":21.89,"NFCorpus":32.08,"NQ":28.5,"QuoraRetrieval":80.42,"SCIDOCS":15.78,"SciFact":68.7,"Touche2020":33.05,"TRECCOVID":62.31} -{"level_0":132,"index":63,"Rank":133,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":39.19,"ArguAna":51.73,"ClimateFEVER":23.58,"CQADupstackRetrieval":32.4,"DBPedia":26.78,"FEVER":53.42,"FiQA2018":28.56,"HotpotQA":52.37,"MSMARCO":17.47,"NFCorpus":26.28,"NQ":37.65,"QuoraRetrieval":84.64,"SCIDOCS":10.39,"SciFact":66.36,"Touche2020":12.82,"TRECCOVID":63.34} -{"level_0":133,"index":173,"Rank":134,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.91,"ArguAna":43.57,"ClimateFEVER":17.25,"CQADupstackRetrieval":33.74,"DBPedia":28.28,"FEVER":69.12,"FiQA2018":25.19,"HotpotQA":47.48,"MSMARCO":31.8,"NFCorpus":25.96,"NQ":38.89,"QuoraRetrieval":85.69,"SCIDOCS":15.29,"SciFact":52.4,"Touche2020":16.67,"TRECCOVID":52.3} -{"level_0":134,"index":132,"Rank":135,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.79,"ArguAna":52.03,"ClimateFEVER":21.82,"CQADupstackRetrieval":34.46,"DBPedia":26.51,"FEVER":56.32,"FiQA2018":30.59,"HotpotQA":42.09,"MSMARCO":27.68,"NFCorpus":26.43,"NQ":37.93,"QuoraRetrieval":85.56,"SCIDOCS":16.6,"SciFact":59.95,"Touche2020":18.85,"TRECCOVID":45.1} -{"level_0":135,"index":245,"Rank":136,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":38.47,"ArguAna":39.4,"ClimateFEVER":10.61,"CQADupstackRetrieval":40.78,"DBPedia":33.65,"FEVER":36.12,"FiQA2018":44.71,"HotpotQA":37.17,"MSMARCO":25.17,"NFCorpus":33.18,"NQ":46.29,"QuoraRetrieval":85.85,"SCIDOCS":15.97,"SciFact":50.91,"Touche2020":22.51,"TRECCOVID":54.77} -{"level_0":136,"index":65,"Rank":137,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":38.05,"ArguAna":51.0,"ClimateFEVER":22.97,"CQADupstackRetrieval":33.37,"DBPedia":25.48,"FEVER":45.11,"FiQA2018":27.24,"HotpotQA":54.54,"MSMARCO":19.13,"NFCorpus":27.16,"NQ":34.16,"QuoraRetrieval":84.4,"SCIDOCS":15.35,"SciFact":68.68,"Touche2020":6.54,"TRECCOVID":55.67} -{"level_0":137,"index":68,"Rank":138,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":37.94,"ArguAna":44.8,"ClimateFEVER":17.69,"CQADupstackRetrieval":38.2,"DBPedia":32.97,"FEVER":45.91,"FiQA2018":21.29,"HotpotQA":51.52,"MSMARCO":29.49,"NFCorpus":26.78,"NQ":32.6,"QuoraRetrieval":85.3,"SCIDOCS":15.31,"SciFact":63.23,"Touche2020":16.3,"TRECCOVID":47.77} -{"level_0":138,"index":80,"Rank":139,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.04,"ArguAna":45.42,"ClimateFEVER":21.86,"CQADupstackRetrieval":27.25,"DBPedia":22.72,"FEVER":60.45,"FiQA2018":21.12,"HotpotQA":40.88,"MSMARCO":27.98,"NFCorpus":22.79,"NQ":29.73,"QuoraRetrieval":72.98,"SCIDOCS":12.21,"SciFact":56.9,"Touche2020":22.97,"TRECCOVID":70.3} -{"level_0":139,"index":44,"Rank":140,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.99,"ArguAna":49.82,"ClimateFEVER":23.22,"CQADupstackRetrieval":31.75,"DBPedia":27.77,"FEVER":59.39,"FiQA2018":25.33,"HotpotQA":41.66,"MSMARCO":24.56,"NFCorpus":24.76,"NQ":32.94,"QuoraRetrieval":84.78,"SCIDOCS":15.26,"SciFact":56.03,"Touche2020":19.0,"TRECCOVID":38.58} -{"level_0":140,"index":61,"Rank":141,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":36.75,"ArguAna":47.09,"ClimateFEVER":20.67,"CQADupstackRetrieval":30.78,"DBPedia":25.81,"FEVER":43.48,"FiQA2018":24.62,"HotpotQA":48.46,"MSMARCO":18.81,"NFCorpus":26.81,"NQ":33.21,"QuoraRetrieval":86.15,"SCIDOCS":10.0,"SciFact":64.48,"Touche2020":10.18,"TRECCOVID":60.67} -{"level_0":141,"index":244,"Rank":142,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":36.71,"ArguAna":39.27,"ClimateFEVER":11.36,"CQADupstackRetrieval":38.96,"DBPedia":31.55,"FEVER":36.21,"FiQA2018":43.55,"HotpotQA":33.95,"MSMARCO":23.96,"NFCorpus":31.1,"NQ":42.02,"QuoraRetrieval":85.73,"SCIDOCS":15.38,"SciFact":49.91,"Touche2020":21.63,"TRECCOVID":46.11} -{"level_0":142,"index":113,"Rank":143,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.08,"ArguAna":52.45,"ClimateFEVER":19.0,"CQADupstackRetrieval":30.71,"DBPedia":25.27,"FEVER":50.13,"FiQA2018":22.14,"HotpotQA":41.33,"MSMARCO":22.15,"NFCorpus":29.05,"NQ":23.45,"QuoraRetrieval":83.63,"SCIDOCS":14.95,"SciFact":61.96,"Touche2020":17.47,"TRECCOVID":47.52} -{"level_0":143,"index":242,"Rank":144,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":35.34,"ArguAna":48.91,"ClimateFEVER":15.27,"CQADupstackRetrieval":31.32,"DBPedia":26.22,"FEVER":56.76,"FiQA2018":22.96,"HotpotQA":37.03,"MSMARCO":26.6,"NFCorpus":25.49,"NQ":33.6,"QuoraRetrieval":86.4,"SCIDOCS":13.97,"SciFact":50.3,"Touche2020":17.4,"TRECCOVID":37.87} -{"level_0":144,"index":243,"Rank":145,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":33.63,"ArguAna":44.85,"ClimateFEVER":10.37,"CQADupstackRetrieval":35.23,"DBPedia":27.77,"FEVER":26.17,"FiQA2018":34.83,"HotpotQA":33.2,"MSMARCO":20.7,"NFCorpus":28.65,"NQ":36.32,"QuoraRetrieval":85.49,"SCIDOCS":14.15,"SciFact":45.76,"Touche2020":20.3,"TRECCOVID":40.7} -{"level_0":145,"index":239,"Rank":146,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":32.96,"ArguAna":45.15,"ClimateFEVER":16.96,"CQADupstackRetrieval":27.72,"DBPedia":27.86,"FEVER":45.68,"FiQA2018":15.62,"HotpotQA":35.61,"MSMARCO":29.57,"NFCorpus":22.29,"NQ":29.85,"QuoraRetrieval":86.51,"SCIDOCS":10.13,"SciFact":52.31,"Touche2020":8.57,"TRECCOVID":40.54} -{"level_0":146,"index":241,"Rank":147,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":32.45,"ArguAna":44.88,"ClimateFEVER":18.49,"CQADupstackRetrieval":30.7,"DBPedia":22.63,"FEVER":52.66,"FiQA2018":20.33,"HotpotQA":30.01,"MSMARCO":23.72,"NFCorpus":23.45,"NQ":29.8,"QuoraRetrieval":86.55,"SCIDOCS":0.03,"SciFact":48.37,"Touche2020":16.06,"TRECCOVID":39.12} -{"level_0":147,"index":84,"Rank":148,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.34,"ArguAna":35.07,"ClimateFEVER":17.57,"CQADupstackRetrieval":29.98,"DBPedia":26.1,"FEVER":38.64,"FiQA2018":18.59,"HotpotQA":33.99,"MSMARCO":15.83,"NFCorpus":28.26,"NQ":24.63,"QuoraRetrieval":84.68,"SCIDOCS":13.55,"SciFact":46.66,"Touche2020":16.18,"TRECCOVID":55.35} -{"level_0":148,"index":72,"Rank":149,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.33,"ArguAna":35.87,"ClimateFEVER":13.11,"CQADupstackRetrieval":21.69,"DBPedia":29.33,"FEVER":35.58,"FiQA2018":8.1,"HotpotQA":47.03,"MSMARCO":19.8,"NFCorpus":18.75,"NQ":23.25,"QuoraRetrieval":82.3,"SCIDOCS":11.12,"SciFact":50.24,"Touche2020":8.0,"TRECCOVID":35.74} -{"level_0":149,"index":70,"Rank":150,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":26.17,"ArguAna":31.14,"ClimateFEVER":10.02,"CQADupstackRetrieval":18.63,"DBPedia":25.66,"FEVER":29.18,"FiQA2018":6.42,"HotpotQA":39.88,"MSMARCO":16.53,"NFCorpus":15.64,"NQ":19.99,"QuoraRetrieval":79.94,"SCIDOCS":9.93,"SciFact":42.42,"Touche2020":7.75,"TRECCOVID":39.48} -{"level_0":150,"index":67,"Rank":151,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":25.93,"ArguAna":43.64,"ClimateFEVER":18.95,"CQADupstackRetrieval":18.5,"DBPedia":13.21,"FEVER":16.96,"FiQA2018":16.99,"HotpotQA":22.64,"MSMARCO":7.03,"NFCorpus":15.73,"NQ":17.96,"QuoraRetrieval":78.23,"SCIDOCS":5.53,"SciFact":38.31,"Touche2020":19.17,"TRECCOVID":56.04} -{"level_0":151,"index":71,"Rank":152,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.51,"ArguAna":34.8,"ClimateFEVER":8.35,"CQADupstackRetrieval":19.16,"DBPedia":21.32,"FEVER":26.08,"FiQA2018":11.71,"HotpotQA":38.38,"MSMARCO":16.96,"NFCorpus":14.02,"NQ":20.74,"QuoraRetrieval":74.94,"SCIDOCS":9.39,"SciFact":46.0,"Touche2020":10.51,"TRECCOVID":30.27} -{"level_0":152,"index":217,"Rank":153,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":21.82,"ArguAna":38.33,"ClimateFEVER":11.98,"CQADupstackRetrieval":14.5,"DBPedia":19.73,"FEVER":20.41,"FiQA2018":10.41,"HotpotQA":22.9,"MSMARCO":11.0,"NFCorpus":12.42,"NQ":16.08,"QuoraRetrieval":79.62,"SCIDOCS":7.53,"SciFact":29.59,"Touche2020":9.9,"TRECCOVID":22.93} -{"level_0":153,"index":232,"Rank":154,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, 
fp32)":0.45,"Average":21.62,"ArguAna":36.3,"ClimateFEVER":14.44,"CQADupstackRetrieval":15.47,"DBPedia":18.28,"FEVER":14.99,"FiQA2018":10.09,"HotpotQA":19.18,"MSMARCO":9.6,"NFCorpus":13.87,"NQ":12.87,"QuoraRetrieval":71.32,"SCIDOCS":8.04,"SciFact":29.58,"Touche2020":13.99,"TRECCOVID":36.22} -{"level_0":154,"index":233,"Rank":155,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":21.22,"ArguAna":30.96,"ClimateFEVER":14.87,"CQADupstackRetrieval":16.79,"DBPedia":15.88,"FEVER":15.56,"FiQA2018":10.49,"HotpotQA":20.77,"MSMARCO":9.75,"NFCorpus":11.79,"NQ":12.75,"QuoraRetrieval":71.57,"SCIDOCS":8.47,"SciFact":29.53,"Touche2020":13.17,"TRECCOVID":35.92} -{"level_0":155,"index":81,"Rank":156,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":20.9,"ArguAna":31.04,"ClimateFEVER":11.01,"CQADupstackRetrieval":20.29,"DBPedia":10.87,"FEVER":18.4,"FiQA2018":8.94,"HotpotQA":17.73,"MSMARCO":6.27,"NFCorpus":11.8,"NQ":7.63,"QuoraRetrieval":78.96,"SCIDOCS":7.13,"SciFact":31.79,"Touche2020":12.27,"TRECCOVID":39.31} -{"level_0":156,"index":218,"Rank":157,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":20.29,"ArguAna":38.34,"ClimateFEVER":11.8,"CQADupstackRetrieval":13.22,"DBPedia":15.04,"FEVER":21.06,"FiQA2018":9.84,"HotpotQA":19.75,"MSMARCO":9.35,"NFCorpus":9.88,"NQ":11.69,"QuoraRetrieval":78.03,"SCIDOCS":5.5,"SciFact":25.72,"Touche2020":8.9,"TRECCOVID":26.2} -{"level_0":157,"index":227,"Rank":158,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":18.99,"ArguAna":34.18,"ClimateFEVER":3.83,"CQADupstackRetrieval":18.75,"DBPedia":15.57,"FEVER":12.18,"FiQA2018":7.0,"HotpotQA":18.75,"MSMARCO":7.6,"NFCorpus":16.54,"NQ":8.42,"QuoraRetrieval":77.03,"SCIDOCS":5.63,"SciFact":38.2,"Touche2020":4.88,"TRECCOVID":16.34} -{"level_0":158,"index":277,"Rank":159,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.36,"ArguAna":39.65,"ClimateFEVER":2.83,"CQADupstackRetrieval":10.17,"DBPedia":3.48,"FEVER":4.45,"FiQA2018":7.54,"HotpotQA":12.6,"MSMARCO":10.53,"NFCorpus":20.59,"NQ":2.02,"QuoraRetrieval":82.18,"SCIDOCS":6.28,"SciFact":45.46,"Touche2020":3.1,"TRECCOVID":24.56} -{"level_0":159,"index":231,"Rank":160,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":15.88,"ArguAna":32.67,"ClimateFEVER":6.86,"CQADupstackRetrieval":14.6,"DBPedia":4.14,"FEVER":5.45,"FiQA2018":5.64,"HotpotQA":5.46,"MSMARCO":5.59,"NFCorpus":0.85,"NQ":5.99,"QuoraRetrieval":64.65,"SCIDOCS":0.0,"SciFact":47.88,"Touche2020":8.46,"TRECCOVID":29.91} -{"level_0":160,"index":141,"Rank":161,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":13.47,"ArguAna":18.3,"ClimateFEVER":1.79,"CQADupstackRetrieval":8.87,"DBPedia":3.92,"FEVER":1.59,"FiQA2018":3.0,"HotpotQA":12.96,"MSMARCO":3.0,"NFCorpus":5.59,"NQ":0.89,"QuoraRetrieval":78.62,"SCIDOCS":1.79,"SciFact":35.29,"Touche2020":1.68,"TRECCOVID":24.82} -{"level_0":161,"index":122,"Rank":162,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":10.59,"ArguAna":28.29,"ClimateFEVER":5.41,"CQADupstackRetrieval":5.51,"DBPedia":4.13,"FEVER":3.3,"FiQA2018":2.19,"HotpotQA":8.26,"MSMARCO":1.91,"NFCorpus":4.3,"NQ":2.62,"QuoraRetrieval":61.03,"SCIDOCS":2.82,"SciFact":13.34,"Touche2020":0.97,"TRECCOVID":14.74} -{"level_0":162,"index":11,"Rank":163,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":7.94,"ArguAna":12.86,"ClimateFEVER":0.36,"CQADupstackRetrieval":4.12,"DBPedia":1.53,"FEVER":0.77,"FiQA2018":1.73,"HotpotQA":5.5,"MSMARCO":1.09,"NFCorpus":2.44,"NQ":0.64,"QuoraRetrieval":71.14,"SCIDOCS":0.78,"SciFact":4.04,"Touche2020":1.06,"TRECCOVID":10.97} -{"level_0":163,"index":2,"Rank":164,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":48.83,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":40.38,"HotpotQA":"","MSMARCO":35.19,"NFCorpus":"","NQ":51.08,"QuoraRetrieval":"","SCIDOCS":"","SciFact":73.5,"Touche2020":"","TRECCOVID":54.74} -{"level_0":164,"index":77,"Rank":196,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":44.35,"ClimateFEVER":17.77,"CQADupstackRetrieval":25.56,"DBPedia":21.94,"FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":165,"index":78,"Rank":197,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":35.0,"ClimateFEVER":"","CQADupstackRetrieval":22.96,"DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":166,"index":93,"Rank":206,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.42,"ClimateFEVER":22.21,"CQADupstackRetrieval":"","DBPedia":44.02,"FEVER":82.83,"FiQA2018":41.14,"HotpotQA":73.13,"MSMARCO":43.46,"NFCorpus":37.13,"NQ":63.44,"QuoraRetrieval":86.84,"SCIDOCS":20.51,"SciFact":72.24,"Touche2020":20.67,"TRECCOVID":66.54} -{"level_0":167,"index":111,"Rank":213,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":66.15,"ClimateFEVER":38.53,"CQADupstackRetrieval":"","DBPedia":44.89,"FEVER":88.24,"FiQA2018":44.84,"HotpotQA":73.13,"MSMARCO":41.4,"NFCorpus":38.65,"NQ":55.86,"QuoraRetrieval":89.02,"SCIDOCS":22.98,"SciFact":74.07,"Touche2020":24.93,"TRECCOVID":76.33} -{"level_0":168,"index":114,"Rank":214,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":63.54,"ClimateFEVER":36.57,"CQADupstackRetrieval":"","DBPedia":44.11,"FEVER":87.18,"FiQA2018":45.02,"HotpotQA":74.1,"MSMARCO":42.49,"NFCorpus":38.13,"NQ":55.03,"QuoraRetrieval":89.07,"SCIDOCS":22.64,"SciFact":74.61,"Touche2020":24.81,"TRECCOVID":74.82} -{"level_0":169,"index":120,"Rank":215,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":63.61,"ClimateFEVER":31.17,"CQADupstackRetrieval":"","DBPedia":40.77,"FEVER":86.29,"FiQA2018":40.65,"HotpotQA":72.6,"MSMARCO":41.35,"NFCorpus":37.39,"NQ":54.15,"QuoraRetrieval":88.9,"SCIDOCS":21.73,"SciFact":74.04,"Touche2020":25.7,"TRECCOVID":78.07} 
-{"level_0":170,"index":124,"Rank":216,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","ArguAna":53.96,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":41.3,"HotpotQA":"","MSMARCO":"","NFCorpus":31.41,"NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":64.37,"Touche2020":"","TRECCOVID":""} -{"level_0":171,"index":127,"Rank":217,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":37.27,"ClimateFEVER":8.69,"CQADupstackRetrieval":18.81,"DBPedia":14.77,"FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":172,"index":128,"Rank":218,"Model":"cai-stellaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":47.29,"ClimateFEVER":13.48,"CQADupstackRetrieval":31.03,"DBPedia":22.45,"FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":173,"index":133,"Rank":221,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":66.02,"ClimateFEVER":36.09,"CQADupstackRetrieval":"","DBPedia":44.51,"FEVER":86.91,"FiQA2018":45.27,"HotpotQA":72.03,"MSMARCO":41.26,"NFCorpus":38.64,"NQ":55.79,"QuoraRetrieval":88.98,"SCIDOCS":23.32,"SciFact":74.73,"Touche2020":25.2,"TRECCOVID":75.57} -{"level_0":174,"index":135,"Rank":222,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":44.18,"ClimateFEVER":23.53,"CQADupstackRetrieval":"","DBPedia":35.05,"FEVER":72.33,"FiQA2018":41.58,"HotpotQA":61.38,"MSMARCO":40.92,"NFCorpus":32.45,"NQ":60.44,"QuoraRetrieval":88.2,"SCIDOCS":19.86,"SciFact":66.68,"Touche2020":26.24,"TRECCOVID":65.91} -{"level_0":175,"index":136,"Rank":223,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.73,"ClimateFEVER":24.05,"CQADupstackRetrieval":"","DBPedia":32.65,"FEVER":68.02,"FiQA2018":33.43,"HotpotQA":56.48,"MSMARCO":37.28,"NFCorpus":30.4,"NQ":51.59,"QuoraRetrieval":87.19,"SCIDOCS":18.61,"SciFact":63.89,"Touche2020":23.52,"TRECCOVID":65.18} -{"level_0":176,"index":137,"Rank":224,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":60.63,"ClimateFEVER":29.0,"CQADupstackRetrieval":"","DBPedia":39.64,"FEVER":79.13,"FiQA2018":38.62,"HotpotQA":68.22,"MSMARCO":40.95,"NFCorpus":37.51,"NQ":50.2,"QuoraRetrieval":88.72,"SCIDOCS":18.58,"SciFact":72.51,"Touche2020":21.9,"TRECCOVID":64.79} -{"level_0":177,"index":150,"Rank":230,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":63.54,"ClimateFEVER":36.57,"CQADupstackRetrieval":"","DBPedia":44.11,"FEVER":87.18,"FiQA2018":45.02,"HotpotQA":74.1,"MSMARCO":42.49,"NFCorpus":38.13,"NQ":55.03,"QuoraRetrieval":89.07,"SCIDOCS":22.64,"SciFact":74.61,"Touche2020":24.81,"TRECCOVID":74.82} -{"level_0":178,"index":165,"Rank":233,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","ArguAna":66.15,"ClimateFEVER":38.53,"CQADupstackRetrieval":"","DBPedia":44.89,"FEVER":88.24,"FiQA2018":44.84,"HotpotQA":73.13,"MSMARCO":41.4,"NFCorpus":38.65,"NQ":55.86,"QuoraRetrieval":89.02,"SCIDOCS":22.98,"SciFact":74.07,"Touche2020":24.93,"TRECCOVID":76.33} -{"level_0":179,"index":174,"Rank":234,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.33,"ClimateFEVER":21.8,"CQADupstackRetrieval":36.22,"DBPedia":31.47,"FEVER":67.89,"FiQA2018":32.3,"HotpotQA":55.39,"MSMARCO":"","NFCorpus":28.61,"NQ":48.9,"QuoraRetrieval":87.93,"SCIDOCS":16.29,"SciFact":60.68,"Touche2020":21.03,"TRECCOVID":65.12} -{"level_0":180,"index":176,"Rank":235,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":50.13,"ClimateFEVER":27.13,"CQADupstackRetrieval":38.78,"DBPedia":32.63,"FEVER":78.43,"FiQA2018":37.01,"HotpotQA":59.48,"MSMARCO":"","NFCorpus":30.3,"NQ":50.7,"QuoraRetrieval":88.14,"SCIDOCS":17.36,"SciFact":62.67,"Touche2020":19.82,"TRECCOVID":67.37} -{"level_0":181,"index":196,"Rank":243,"Model":"fin-mpnet-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.11,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":79.91,"HotpotQA":"","MSMARCO":"","NFCorpus":29.64,"NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":65.4,"Touche2020":"","TRECCOVID":""} -{"level_0":182,"index":202,"Rank":248,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":57.77,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":183,"index":203,"Rank":249,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.13,"ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":184,"index":248,"Rank":260,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":"","ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":83.07,"SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":185,"index":251,"Rank":262,"Model":"Angle_BERT<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":38.88,"ClimateFEVER":9.94,"CQADupstackRetrieval":18.0,"DBPedia":"","FEVER":"","FiQA2018":"","HotpotQA":"","MSMARCO":"","NFCorpus":"","NQ":"","QuoraRetrieval":"","SCIDOCS":"","SciFact":"","Touche2020":"","TRECCOVID":""} -{"level_0":186,"index":255,"Rank":263,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":5.35,"ClimateFEVER":0.59,"CQADupstackRetrieval":"","DBPedia":0.43,"FEVER":0.51,"FiQA2018":0.76,"HotpotQA":0.59,"MSMARCO":0.31,"NFCorpus":4.87,"NQ":0.33,"QuoraRetrieval":31.95,"SCIDOCS":1.02,"SciFact":2.91,"Touche2020":3.82,"TRECCOVID":7.5} -{"level_0":187,"index":256,"Rank":264,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","ArguAna":36.96,"ClimateFEVER":20.95,"CQADupstackRetrieval":"","DBPedia":24.25,"FEVER":29.03,"FiQA2018":13.57,"HotpotQA":33.73,"MSMARCO":9.51,"NFCorpus":21.89,"NQ":20.45,"QuoraRetrieval":67.91,"SCIDOCS":11.37,"SciFact":38.8,"Touche2020":18.78,"TRECCOVID":49.87} -{"level_0":188,"index":262,"Rank":269,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":54.83,"ClimateFEVER":31.95,"CQADupstackRetrieval":32.2,"DBPedia":41.81,"FEVER":62.94,"FiQA2018":29.36,"HotpotQA":63.85,"MSMARCO":"","NFCorpus":28.47,"NQ":42.04,"QuoraRetrieval":88.15,"SCIDOCS":17.3,"SciFact":65.72,"Touche2020":18.1,"TRECCOVID":53.89} -{"level_0":189,"index":263,"Rank":270,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":0.44,"ClimateFEVER":0.11,"CQADupstackRetrieval":"","DBPedia":0.28,"FEVER":0.25,"FiQA2018":0.16,"HotpotQA":0.3,"MSMARCO":0.2,"NFCorpus":0.23,"NQ":0.21,"QuoraRetrieval":0.84,"SCIDOCS":0.15,"SciFact":0.51,"Touche2020":0.12,"TRECCOVID":0.37} -{"level_0":190,"index":267,"Rank":274,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.87,"ClimateFEVER":19.68,"CQADupstackRetrieval":"","DBPedia":36.06,"FEVER":69.98,"FiQA2018":35.49,"HotpotQA":65.0,"MSMARCO":68.72,"NFCorpus":31.81,"NQ":52.15,"QuoraRetrieval":85.02,"SCIDOCS":17.36,"SciFact":67.97,"Touche2020":13.23,"TRECCOVID":52.61} -{"level_0":191,"index":272,"Rank":279,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.91,"ClimateFEVER":18.5,"CQADupstackRetrieval":"","DBPedia":36.2,"FEVER":72.1,"FiQA2018":38.41,"HotpotQA":59.39,"MSMARCO":37.94,"NFCorpus":33.17,"NQ":42.81,"QuoraRetrieval":70.57,"SCIDOCS":14.83,"SciFact":67.25,"Touche2020":28.68,"TRECCOVID":72.43} -{"level_0":192,"index":274,"Rank":281,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":49.2,"ClimateFEVER":19.9,"CQADupstackRetrieval":"","DBPedia":"","FEVER":77.0,"FiQA2018":42.2,"HotpotQA":63.1,"MSMARCO":"","NFCorpus":36.7,"NQ":"","QuoraRetrieval":69.7,"SCIDOCS":"","SciFact":70.4,"Touche2020":29.7,"TRECCOVID":58.5} -{"level_0":193,"index":275,"Rank":282,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":46.98,"ClimateFEVER":19.4,"CQADupstackRetrieval":"","DBPedia":"","FEVER":75.6,"FiQA2018":45.21,"HotpotQA":64.8,"MSMARCO":"","NFCorpus":38.01,"NQ":"","QuoraRetrieval":67.7,"SCIDOCS":17.74,"SciFact":74.35,"Touche2020":30.9,"TRECCOVID":56.14} -{"level_0":194,"index":276,"Rank":283,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":43.5,"ClimateFEVER":22.3,"CQADupstackRetrieval":"","DBPedia":"","FEVER":77.5,"FiQA2018":51.2,"HotpotQA":68.8,"MSMARCO":"","NFCorpus":40.7,"NQ":"","QuoraRetrieval":63.8,"SCIDOCS":"","SciFact":75.4,"Touche2020":29.1,"TRECCOVID":64.9} -{"level_0":195,"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna":"","ClimateFEVER":"","CQADupstackRetrieval":"","DBPedia":"","FEVER":"","FiQA2018":5.14,"HotpotQA":"","MSMARCO":"","NFCorpus":19.96,"NQ":"","QuoraRetrieval":83.11,"SCIDOCS":"","SciFact":46.68,"Touche2020":"","TRECCOVID":7.61} 
+{"Rank":1,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.28,"ArguAna":64.06,"ClimateFEVER":32.65,"CQADupstackRetrieval":46.6,"DBPedia":46.03,"FEVER":91.47,"FiQA2018":59.76,"HotpotQA":70.86,"MSMARCO":40.6,"NFCorpus":40.32,"NQ":65.92,"QuoraRetrieval":87.4,"SCIDOCS":24.32,"SciFact":79.99,"Touche2020":39.16,"TRECCOVID":85.07} +{"Rank":2,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":56.63,"ArguAna":62.78,"ClimateFEVER":34.27,"CQADupstackRetrieval":48.25,"DBPedia":48.34,"FEVER":90.2,"FiQA2018":55.33,"HotpotQA":71.76,"MSMARCO":43.24,"NFCorpus":41.83,"NQ":64.21,"QuoraRetrieval":87.16,"SCIDOCS":22.96,"SciFact":78.22,"Touche2020":20.5,"TRECCOVID":80.34} +{"Rank":3,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":56.6,"ArguAna":70.28,"ClimateFEVER":31.95,"CQADupstackRetrieval":46.2,"DBPedia":39.79,"FEVER":91.35,"FiQA2018":52.51,"HotpotQA":75.51,"MSMARCO":37.93,"NFCorpus":43.7,"NQ":64.26,"QuoraRetrieval":87.62,"SCIDOCS":20.24,"SciFact":79.91,"Touche2020":26.8,"TRECCOVID":81.02} +{"Rank":4,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":56.24,"ArguAna":62.65,"ClimateFEVER":44.0,"CQADupstackRetrieval":40.64,"DBPedia":48.04,"FEVER":93.35,"FiQA2018":55.31,"HotpotQA":72.25,"MSMARCO":41.68,"NFCorpus":38.25,"NQ":61.79,"QuoraRetrieval":89.61,"SCIDOCS":27.69,"SciFact":75.31,"Touche2020":20.3,"TRECCOVID":72.72} +{"Rank":5,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":55.99,"ArguAna":57.48,"ClimateFEVER":35.19,"CQADupstackRetrieval":48.84,"DBPedia":49.58,"FEVER":89.4,"FiQA2018":53.11,"HotpotQA":74.07,"MSMARCO":42.17,"NFCorpus":39.33,"NQ":61.7,"QuoraRetrieval":87.75,"SCIDOCS":22.5,"SciFact":78.86,"Touche2020":22.18,"TRECCOVID":77.69} +{"Rank":6,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":55.7,"ArguAna":62.18,"ClimateFEVER":33.21,"CQADupstackRetrieval":48.89,"DBPedia":47.12,"FEVER":86.96,"FiQA2018":59.24,"HotpotQA":71.33,"MSMARCO":32.58,"NFCorpus":40.33,"NQ":61.28,"QuoraRetrieval":88.18,"SCIDOCS":20.34,"SciFact":75.42,"Touche2020":25.86,"TRECCOVID":82.62} +{"Rank":7,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.58,"ArguAna":58.73,"ClimateFEVER":37.47,"CQADupstackRetrieval":45.11,"DBPedia":43.42,"FEVER":89.71,"FiQA2018":44.79,"HotpotQA":70.46,"MSMARCO":39.66,"NFCorpus":43.33,"NQ":60.65,"QuoraRetrieval":87.83,"SCIDOCS":23.19,"SciFact":73.64,"Touche2020":36.83,"TRECCOVID":78.92} +{"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.44,"ArguAna":58.05,"ClimateFEVER":30.27,"CQADupstackRetrieval":47.54,"DBPedia":44.76,"FEVER":87.94,"FiQA2018":55.0,"HotpotQA":71.58,"MSMARCO":40.24,"NFCorpus":42.07,"NQ":61.27,"QuoraRetrieval":89.05,"SCIDOCS":23.11,"SciFact":77.77,"Touche2020":23.35,"TRECCOVID":79.56} +{"Rank":9,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, 
fp32)":24.61,"Average":54.6,"ArguAna":56.53,"ClimateFEVER":30.7,"CQADupstackRetrieval":45.94,"DBPedia":48.42,"FEVER":89.93,"FiQA2018":51.28,"HotpotQA":72.99,"MSMARCO":41.46,"NFCorpus":40.33,"NQ":61.24,"QuoraRetrieval":85.59,"SCIDOCS":21.05,"SciFact":77.3,"Touche2020":16.92,"TRECCOVID":79.25} +{"Rank":10,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":52.44,"ArguAna":56.27,"ClimateFEVER":29.35,"CQADupstackRetrieval":45.41,"DBPedia":41.91,"FEVER":82.61,"FiQA2018":55.54,"HotpotQA":64.65,"MSMARCO":31.12,"NFCorpus":37.81,"NQ":57.37,"QuoraRetrieval":87.89,"SCIDOCS":18.21,"SciFact":70.86,"Touche2020":27.4,"TRECCOVID":80.13} +{"Rank":11,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":52.4,"ArguAna":47.45,"ClimateFEVER":40.7,"CQADupstackRetrieval":39.06,"DBPedia":42.96,"FEVER":85.7,"FiQA2018":36.92,"HotpotQA":71.48,"MSMARCO":42.29,"NFCorpus":33.31,"NQ":58.83,"QuoraRetrieval":87.87,"SCIDOCS":17.88,"SciFact":70.12,"Touche2020":29.24,"TRECCOVID":82.12} +{"Rank":12,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.66,"ArguAna":55.6,"ClimateFEVER":25.8,"CQADupstackRetrieval":42.28,"DBPedia":40.8,"FEVER":84.57,"FiQA2018":50.33,"HotpotQA":62.69,"MSMARCO":37.93,"NFCorpus":37.94,"NQ":56.64,"QuoraRetrieval":88.22,"SCIDOCS":20.44,"SciFact":73.1,"Touche2020":22.31,"TRECCOVID":76.24} +{"Rank":13,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":51.44,"ArguAna":51.66,"ClimateFEVER":33.49,"CQADupstackRetrieval":41.73,"DBPedia":43.58,"FEVER":86.81,"FiQA2018":41.0,"HotpotQA":63.85,"MSMARCO":38.32,"NFCorpus":37.12,"NQ":53.89,"QuoraRetrieval":87.37,"SCIDOCS":17.96,"SciFact":72.08,"Touche2020":22.31,"TRECCOVID":80.41} +{"Rank":14,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.08,"ArguAna":55.49,"ClimateFEVER":26.86,"CQADupstackRetrieval":42.58,"DBPedia":39.97,"FEVER":79.42,"FiQA2018":44.91,"HotpotQA":63.63,"MSMARCO":37.02,"NFCorpus":38.33,"NQ":52.86,"QuoraRetrieval":88.83,"SCIDOCS":20.8,"SciFact":73.37,"Touche2020":24.28,"TRECCOVID":77.9} +{"Rank":15,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":50.81,"ArguAna":45.44,"ClimateFEVER":39.63,"CQADupstackRetrieval":37.61,"DBPedia":39.42,"FEVER":84.4,"FiQA2018":35.0,"HotpotQA":67.78,"MSMARCO":41.38,"NFCorpus":32.54,"NQ":57.1,"QuoraRetrieval":87.65,"SCIDOCS":16.76,"SciFact":68.24,"Touche2020":28.49,"TRECCOVID":80.65} +{"Rank":16,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.02,"ArguAna":55.98,"ClimateFEVER":27.08,"CQADupstackRetrieval":34.27,"DBPedia":42.7,"FEVER":78.55,"FiQA2018":41.57,"HotpotQA":67.01,"MSMARCO":38.9,"NFCorpus":36.66,"NQ":55.84,"QuoraRetrieval":84.69,"SCIDOCS":16.24,"SciFact":71.8,"Touche2020":26.27,"TRECCOVID":72.72} +{"Rank":17,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.25,"ArguAna":57.44,"ClimateFEVER":21.64,"CQADupstackRetrieval":41.69,"DBPedia":39.39,"FEVER":74.99,"FiQA2018":44.41,"HotpotQA":60.9,"MSMARCO":40.91,"NFCorpus":36.97,"NQ":51.58,"QuoraRetrieval":87.6,"SCIDOCS":18.36,"SciFact":72.75,"Touche2020":21.61,"TRECCOVID":68.47} 
+{"Rank":18,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":48.48,"ArguAna":53.77,"ClimateFEVER":27.21,"CQADupstackRetrieval":38.56,"DBPedia":41.28,"FEVER":74.08,"FiQA2018":46.78,"HotpotQA":59.67,"MSMARCO":44.05,"NFCorpus":34.18,"NQ":57.24,"QuoraRetrieval":89.09,"SCIDOCS":15.88,"SciFact":66.77,"Touche2020":26.76,"TRECCOVID":51.9} +{"Rank":19,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":47.96,"ArguAna":52.81,"ClimateFEVER":27.01,"CQADupstackRetrieval":37.35,"DBPedia":39.74,"FEVER":72.18,"FiQA2018":44.19,"HotpotQA":58.91,"MSMARCO":43.52,"NFCorpus":33.34,"NQ":56.16,"QuoraRetrieval":88.91,"SCIDOCS":15.71,"SciFact":64.2,"Touche2020":25.26,"TRECCOVID":60.09} +{"Rank":20,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":47.75,"ArguAna":43.4,"ClimateFEVER":36.52,"CQADupstackRetrieval":34.67,"DBPedia":36.22,"FEVER":80.48,"FiQA2018":32.08,"HotpotQA":60.09,"MSMARCO":39.99,"NFCorpus":30.72,"NQ":53.62,"QuoraRetrieval":87.07,"SCIDOCS":15.56,"SciFact":64.28,"Touche2020":26.99,"TRECCOVID":74.58} +{"Rank":21,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":47.42,"ArguAna":52.09,"ClimateFEVER":26.9,"CQADupstackRetrieval":36.62,"DBPedia":39.55,"FEVER":72.66,"FiQA2018":42.79,"HotpotQA":57.85,"MSMARCO":42.73,"NFCorpus":32.63,"NQ":55.09,"QuoraRetrieval":88.47,"SCIDOCS":15.51,"SciFact":63.42,"Touche2020":28.29,"TRECCOVID":56.68} +{"Rank":22,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":44.67,"ArguAna":50.83,"ClimateFEVER":24.88,"CQADupstackRetrieval":34.55,"DBPedia":35.24,"FEVER":68.93,"FiQA2018":35.15,"HotpotQA":54.93,"MSMARCO":41.16,"NFCorpus":30.22,"NQ":50.47,"QuoraRetrieval":87.98,"SCIDOCS":14.0,"SciFact":59.74,"Touche2020":25.89,"TRECCOVID":56.05} +{"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":43.81,"ArguAna":46.52,"ClimateFEVER":21.97,"CQADupstackRetrieval":44.96,"DBPedia":32.09,"FEVER":50.86,"FiQA2018":49.96,"HotpotQA":39.29,"MSMARCO":39.75,"NFCorpus":33.29,"NQ":50.45,"QuoraRetrieval":87.46,"SCIDOCS":23.76,"SciFact":65.57,"Touche2020":19.93,"TRECCOVID":51.33} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":42.69,"ArguAna":47.13,"ClimateFEVER":21.57,"CQADupstackRetrieval":42.53,"DBPedia":33.35,"FEVER":55.9,"FiQA2018":37.27,"HotpotQA":44.59,"MSMARCO":39.03,"NFCorpus":32.25,"NQ":46.47,"QuoraRetrieval":87.75,"SCIDOCS":21.82,"SciFact":62.64,"Touche2020":17.22,"TRECCOVID":50.82} +{"Rank":25,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":42.24,"ArguAna":39.85,"ClimateFEVER":14.63,"CQADupstackRetrieval":44.65,"DBPedia":39.19,"FEVER":51.2,"FiQA2018":46.68,"HotpotQA":42.14,"MSMARCO":27.67,"NFCorpus":35.08,"NQ":52.87,"QuoraRetrieval":85.96,"SCIDOCS":17.17,"SciFact":55.38,"Touche2020":21.65,"TRECCOVID":59.48} +{"Rank":26,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":41.95,"ArguAna":50.17,"ClimateFEVER":20.27,"CQADupstackRetrieval":41.32,"DBPedia":32.33,"FEVER":51.93,"FiQA2018":36.87,"HotpotQA":46.51,"MSMARCO":36.54,"NFCorpus":31.59,"NQ":43.87,"QuoraRetrieval":87.56,"SCIDOCS":21.64,"SciFact":64.51,"Touche2020":16.9,"TRECCOVID":47.25} 
+{"Rank":27,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.88,"ArguAna":48.32,"ClimateFEVER":24.79,"CQADupstackRetrieval":33.67,"DBPedia":38.1,"FEVER":59.29,"FiQA2018":27.42,"HotpotQA":56.81,"MSMARCO":36.77,"NFCorpus":31.32,"NQ":41.83,"QuoraRetrieval":86.72,"SCIDOCS":17.12,"SciFact":65.51,"Touche2020":15.79,"TRECCOVID":44.77} +{"Rank":28,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":40.92,"ArguAna":37.16,"ClimateFEVER":31.48,"CQADupstackRetrieval":28.72,"DBPedia":28.19,"FEVER":70.24,"FiQA2018":25.78,"HotpotQA":43.07,"MSMARCO":35.95,"NFCorpus":26.03,"NQ":45.54,"QuoraRetrieval":85.83,"SCIDOCS":12.09,"SciFact":52.71,"Touche2020":23.13,"TRECCOVID":67.83} +{"Rank":29,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.84,"ArguAna":49.28,"ClimateFEVER":13.62,"CQADupstackRetrieval":31.86,"DBPedia":29.91,"FEVER":48.09,"FiQA2018":25.14,"HotpotQA":56.91,"MSMARCO":21.89,"NFCorpus":32.08,"NQ":28.5,"QuoraRetrieval":80.42,"SCIDOCS":15.78,"SciFact":68.7,"Touche2020":33.05,"TRECCOVID":62.31} +{"Rank":30,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":39.19,"ArguAna":51.73,"ClimateFEVER":23.58,"CQADupstackRetrieval":32.4,"DBPedia":26.78,"FEVER":53.42,"FiQA2018":28.56,"HotpotQA":52.37,"MSMARCO":17.47,"NFCorpus":26.28,"NQ":37.65,"QuoraRetrieval":84.64,"SCIDOCS":10.39,"SciFact":66.36,"Touche2020":12.82,"TRECCOVID":63.34} +{"Rank":31,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":38.47,"ArguAna":39.4,"ClimateFEVER":10.61,"CQADupstackRetrieval":40.78,"DBPedia":33.65,"FEVER":36.12,"FiQA2018":44.71,"HotpotQA":37.17,"MSMARCO":25.17,"NFCorpus":33.18,"NQ":46.29,"QuoraRetrieval":85.85,"SCIDOCS":15.97,"SciFact":50.91,"Touche2020":22.51,"TRECCOVID":54.77} +{"Rank":32,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":38.05,"ArguAna":51.0,"ClimateFEVER":22.97,"CQADupstackRetrieval":33.37,"DBPedia":25.48,"FEVER":45.11,"FiQA2018":27.24,"HotpotQA":54.54,"MSMARCO":19.13,"NFCorpus":27.16,"NQ":34.16,"QuoraRetrieval":84.4,"SCIDOCS":15.35,"SciFact":68.68,"Touche2020":6.54,"TRECCOVID":55.67} +{"Rank":33,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":36.75,"ArguAna":47.09,"ClimateFEVER":20.67,"CQADupstackRetrieval":30.78,"DBPedia":25.81,"FEVER":43.48,"FiQA2018":24.62,"HotpotQA":48.46,"MSMARCO":18.81,"NFCorpus":26.81,"NQ":33.21,"QuoraRetrieval":86.15,"SCIDOCS":10.0,"SciFact":64.48,"Touche2020":10.18,"TRECCOVID":60.67} +{"Rank":34,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":36.71,"ArguAna":39.27,"ClimateFEVER":11.36,"CQADupstackRetrieval":38.96,"DBPedia":31.55,"FEVER":36.21,"FiQA2018":43.55,"HotpotQA":33.95,"MSMARCO":23.96,"NFCorpus":31.1,"NQ":42.02,"QuoraRetrieval":85.73,"SCIDOCS":15.38,"SciFact":49.91,"Touche2020":21.63,"TRECCOVID":46.11} +{"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":35.34,"ArguAna":48.91,"ClimateFEVER":15.27,"CQADupstackRetrieval":31.32,"DBPedia":26.22,"FEVER":56.76,"FiQA2018":22.96,"HotpotQA":37.03,"MSMARCO":26.6,"NFCorpus":25.49,"NQ":33.6,"QuoraRetrieval":86.4,"SCIDOCS":13.97,"SciFact":50.3,"Touche2020":17.4,"TRECCOVID":37.87} +{"Rank":36,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":33.63,"ArguAna":44.85,"ClimateFEVER":10.37,"CQADupstackRetrieval":35.23,"DBPedia":27.77,"FEVER":26.17,"FiQA2018":34.83,"HotpotQA":33.2,"MSMARCO":20.7,"NFCorpus":28.65,"NQ":36.32,"QuoraRetrieval":85.49,"SCIDOCS":14.15,"SciFact":45.76,"Touche2020":20.3,"TRECCOVID":40.7} +{"Rank":37,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.96,"ArguAna":45.15,"ClimateFEVER":16.96,"CQADupstackRetrieval":27.72,"DBPedia":27.86,"FEVER":45.68,"FiQA2018":15.62,"HotpotQA":35.61,"MSMARCO":29.57,"NFCorpus":22.29,"NQ":29.85,"QuoraRetrieval":86.51,"SCIDOCS":10.13,"SciFact":52.31,"Touche2020":8.57,"TRECCOVID":40.54} +{"Rank":38,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":32.45,"ArguAna":44.88,"ClimateFEVER":18.49,"CQADupstackRetrieval":30.7,"DBPedia":22.63,"FEVER":52.66,"FiQA2018":20.33,"HotpotQA":30.01,"MSMARCO":23.72,"NFCorpus":23.45,"NQ":29.8,"QuoraRetrieval":86.55,"SCIDOCS":0.03,"SciFact":48.37,"Touche2020":16.06,"TRECCOVID":39.12} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":25.93,"ArguAna":43.64,"ClimateFEVER":18.95,"CQADupstackRetrieval":18.5,"DBPedia":13.21,"FEVER":16.96,"FiQA2018":16.99,"HotpotQA":22.64,"MSMARCO":7.03,"NFCorpus":15.73,"NQ":17.96,"QuoraRetrieval":78.23,"SCIDOCS":5.53,"SciFact":38.31,"Touche2020":19.17,"TRECCOVID":56.04} +{"Rank":40,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":21.82,"ArguAna":38.33,"ClimateFEVER":11.98,"CQADupstackRetrieval":14.5,"DBPedia":19.73,"FEVER":20.41,"FiQA2018":10.41,"HotpotQA":22.9,"MSMARCO":11.0,"NFCorpus":12.42,"NQ":16.08,"QuoraRetrieval":79.62,"SCIDOCS":7.53,"SciFact":29.59,"Touche2020":9.9,"TRECCOVID":22.93} +{"Rank":41,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":21.62,"ArguAna":36.3,"ClimateFEVER":14.44,"CQADupstackRetrieval":15.47,"DBPedia":18.28,"FEVER":14.99,"FiQA2018":10.09,"HotpotQA":19.18,"MSMARCO":9.6,"NFCorpus":13.87,"NQ":12.87,"QuoraRetrieval":71.32,"SCIDOCS":8.04,"SciFact":29.58,"Touche2020":13.99,"TRECCOVID":36.22} +{"Rank":42,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":21.22,"ArguAna":30.96,"ClimateFEVER":14.87,"CQADupstackRetrieval":16.79,"DBPedia":15.88,"FEVER":15.56,"FiQA2018":10.49,"HotpotQA":20.77,"MSMARCO":9.75,"NFCorpus":11.79,"NQ":12.75,"QuoraRetrieval":71.57,"SCIDOCS":8.47,"SciFact":29.53,"Touche2020":13.17,"TRECCOVID":35.92} +{"Rank":43,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":20.29,"ArguAna":38.34,"ClimateFEVER":11.8,"CQADupstackRetrieval":13.22,"DBPedia":15.04,"FEVER":21.06,"FiQA2018":9.84,"HotpotQA":19.75,"MSMARCO":9.35,"NFCorpus":9.88,"NQ":11.69,"QuoraRetrieval":78.03,"SCIDOCS":5.5,"SciFact":25.72,"Touche2020":8.9,"TRECCOVID":26.2} +{"Rank":44,"Model":"LaBSE<\/a>","Model Size (Million 
Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":18.99,"ArguAna":34.18,"ClimateFEVER":3.83,"CQADupstackRetrieval":18.75,"DBPedia":15.57,"FEVER":12.18,"FiQA2018":7.0,"HotpotQA":18.75,"MSMARCO":7.6,"NFCorpus":16.54,"NQ":8.42,"QuoraRetrieval":77.03,"SCIDOCS":5.63,"SciFact":38.2,"Touche2020":4.88,"TRECCOVID":16.34} +{"Rank":45,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.36,"ArguAna":39.65,"ClimateFEVER":2.83,"CQADupstackRetrieval":10.17,"DBPedia":3.48,"FEVER":4.45,"FiQA2018":7.54,"HotpotQA":12.6,"MSMARCO":10.53,"NFCorpus":20.59,"NQ":2.02,"QuoraRetrieval":82.18,"SCIDOCS":6.28,"SciFact":45.46,"Touche2020":3.1,"TRECCOVID":24.56} +{"Rank":46,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":15.88,"ArguAna":32.67,"ClimateFEVER":6.86,"CQADupstackRetrieval":14.6,"DBPedia":4.14,"FEVER":5.45,"FiQA2018":5.64,"HotpotQA":5.46,"MSMARCO":5.59,"NFCorpus":0.85,"NQ":5.99,"QuoraRetrieval":64.65,"SCIDOCS":0.0,"SciFact":47.88,"Touche2020":8.46,"TRECCOVID":29.91} +{"Rank":47,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":10.59,"ArguAna":28.29,"ClimateFEVER":5.41,"CQADupstackRetrieval":5.51,"DBPedia":4.13,"FEVER":3.3,"FiQA2018":2.19,"HotpotQA":8.26,"MSMARCO":1.91,"NFCorpus":4.3,"NQ":2.62,"QuoraRetrieval":61.03,"SCIDOCS":2.82,"SciFact":13.34,"Touche2020":0.97,"TRECCOVID":14.74} +{"Rank":48,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":7.94,"ArguAna":12.86,"ClimateFEVER":0.36,"CQADupstackRetrieval":4.12,"DBPedia":1.53,"FEVER":0.77,"FiQA2018":1.73,"HotpotQA":5.5,"MSMARCO":1.09,"NFCorpus":2.44,"NQ":0.64,"QuoraRetrieval":71.14,"SCIDOCS":0.78,"SciFact":4.04,"Touche2020":1.06,"TRECCOVID":10.97} +{"Rank":49,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":48.83,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":40.38,"HotpotQA":null,"MSMARCO":35.19,"NFCorpus":null,"NQ":51.08,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":73.5,"Touche2020":null,"TRECCOVID":54.74} +{"Rank":50,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":51,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":52,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":53,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":54,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":55,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":56,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":57,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":58,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":59,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":60,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":61,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":62,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":63,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":64,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":65,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"ArguAna":63.17,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":59.91,"HotpotQA":null,"MSMARCO":null,"NFCorpus":40.86,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":24.4,"SciFact":79.13,"Touche2020":27.81,"TRECCOVID":74.36} +{"Rank":66,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":67,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":68,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":69,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":70,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":71,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, 
fp32)":0.11,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":72,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":73,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":74,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":75,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":76,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":77,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":78,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":79,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":80,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, 
fp32)":0.67,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":81,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":82,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":83,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":84,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":85,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":86,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"ArguAna":44.21,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":38.15,"HotpotQA":null,"MSMARCO":null,"NFCorpus":32.49,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":17.17,"SciFact":69.39,"Touche2020":21.5,"TRECCOVID":69.5} +{"Rank":87,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"ArguAna":54.36,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":43.81,"HotpotQA":null,"MSMARCO":null,"NFCorpus":33.95,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":17.45,"SciFact":70.42,"Touche2020":23.13,"TRECCOVID":71.21} +{"Rank":88,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"ArguAna":39.09,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":33.13,"HotpotQA":null,"MSMARCO":null,"NFCorpus":31.0,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":13.9,"SciFact":67.7,"Touche2020":21.16,"TRECCOVID":72.57} +{"Rank":89,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, 
fp32)":0.47,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":90,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":91,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":92,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":93,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":94,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":95,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":96,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":97,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":98,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, 
fp32)":0.11,"Average":null,"ArguAna":32.03,"ClimateFEVER":5.56,"CQADupstackRetrieval":null,"DBPedia":9.61,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":99,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":46.91,"ClimateFEVER":18.5,"CQADupstackRetrieval":null,"DBPedia":36.2,"FEVER":72.1,"FiQA2018":38.41,"HotpotQA":59.39,"MSMARCO":37.94,"NFCorpus":33.17,"NQ":42.81,"QuoraRetrieval":70.57,"SCIDOCS":14.83,"SciFact":67.25,"Touche2020":28.68,"TRECCOVID":72.43} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":49.2,"ClimateFEVER":19.9,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":77.0,"FiQA2018":42.2,"HotpotQA":63.1,"MSMARCO":null,"NFCorpus":36.7,"NQ":null,"QuoraRetrieval":69.7,"SCIDOCS":null,"SciFact":70.4,"Touche2020":29.7,"TRECCOVID":58.5} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"ArguAna":46.98,"ClimateFEVER":19.4,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":75.6,"FiQA2018":45.21,"HotpotQA":64.8,"MSMARCO":null,"NFCorpus":38.01,"NQ":null,"QuoraRetrieval":67.7,"SCIDOCS":17.74,"SciFact":74.35,"Touche2020":30.9,"TRECCOVID":56.14} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":43.5,"ClimateFEVER":22.3,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":77.5,"FiQA2018":51.2,"HotpotQA":68.8,"MSMARCO":null,"NFCorpus":40.7,"NQ":null,"QuoraRetrieval":63.8,"SCIDOCS":null,"SciFact":75.4,"Touche2020":29.1,"TRECCOVID":64.9} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":5.14,"HotpotQA":null,"MSMARCO":null,"NFCorpus":19.96,"NQ":null,"QuoraRetrieval":83.11,"SCIDOCS":null,"SciFact":46.68,"Touche2020":null,"TRECCOVID":7.61} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"ArguAna":null,"ClimateFEVER":null,"CQADupstackRetrieval":null,"DBPedia":null,"FEVER":null,"FiQA2018":null,"HotpotQA":null,"MSMARCO":null,"NFCorpus":null,"NQ":null,"QuoraRetrieval":null,"SCIDOCS":null,"SciFact":null,"Touche2020":null,"TRECCOVID":null} diff --git a/boards_data/en/data_tasks/STS/default.jsonl b/boards_data/en/data_tasks/STS/default.jsonl index 2c21e674058ed69d9cc373445ca18f6523ae09da..e33640bbf1120979356ea1259d31944d99c016d6 100644 --- a/boards_data/en/data_tasks/STS/default.jsonl +++ b/boards_data/en/data_tasks/STS/default.jsonl @@ -1,206 +1,111 @@ -{"level_0":0,"index":9,"Rank":1,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":85.79,"BIOSSES":89.7,"SICK-R":78.44,"STS12":86.46,"STS13":87.76,"STS14":86.6,"STS15":90.1,"STS16":86.39,"STS17 (en-en)":86.98,"STS22 (en)":76.89,"STSBenchmark":88.56} -{"level_0":1,"index":51,"Rank":2,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.24,"BIOSSES":87.98,"SICK-R":83.22,"STS12":79.4,"STS13":89.58,"STS14":84.86,"STS15":89.9,"STS16":86.31,"STS17 (en-en)":91.19,"STS22 (en)":70.08,"STSBenchmark":89.91} -{"level_0":2,"index":1,"Rank":3,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":85.07,"BIOSSES":89.46,"SICK-R":81.93,"STS12":77.59,"STS13":90.36,"STS14":85.25,"STS15":89.66,"STS16":87.34,"STS17 (en-en)":92.06,"STS22 (en)":68.02,"STSBenchmark":88.99} -{"level_0":3,"index":96,"Rank":4,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":85.05,"BIOSSES":86.07,"SICK-R":82.92,"STS12":79.47,"STS13":89.15,"STS14":84.93,"STS15":90.74,"STS16":87.82,"STS17 (en-en)":92.02,"STS22 (en)":68.36,"STSBenchmark":89.0} -{"level_0":4,"index":261,"Rank":5,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":85.04,"BIOSSES":89.15,"SICK-R":82.83,"STS12":78.65,"STS13":90.0,"STS14":84.97,"STS15":89.81,"STS16":86.71,"STS17 (en-en)":89.8,"STS22 (en)":69.67,"STSBenchmark":88.77} -{"level_0":5,"index":197,"Rank":6,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.04,"BIOSSES":89.15,"SICK-R":82.83,"STS12":78.65,"STS13":90.0,"STS14":84.97,"STS15":89.81,"STS16":86.71,"STS17 (en-en)":89.8,"STS22 (en)":69.67,"STSBenchmark":88.77} -{"level_0":6,"index":194,"Rank":7,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.0,"BIOSSES":88.41,"SICK-R":82.92,"STS12":78.76,"STS13":90.35,"STS14":85.46,"STS15":89.62,"STS16":86.58,"STS17 (en-en)":89.5,"STS22 (en)":69.34,"STSBenchmark":89.1} -{"level_0":7,"index":133,"Rank":8,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.0,"BIOSSES":88.41,"SICK-R":82.92,"STS12":78.76,"STS13":90.35,"STS14":85.46,"STS15":89.62,"STS16":86.58,"STS17 (en-en)":89.5,"STS22 (en)":69.34,"STSBenchmark":89.1} -{"level_0":8,"index":53,"Rank":9,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.99,"BIOSSES":88.38,"SICK-R":83.0,"STS12":79.22,"STS13":89.43,"STS14":84.79,"STS15":89.54,"STS16":86.69,"STS17 (en-en)":89.64,"STS22 (en)":70.26,"STSBenchmark":88.96} -{"level_0":9,"index":58,"Rank":10,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":84.97,"BIOSSES":86.4,"SICK-R":84.31,"STS12":78.44,"STS13":88.27,"STS14":84.49,"STS15":90.28,"STS16":87.37,"STS17 (en-en)":92.68,"STS22 (en)":68.62,"STSBenchmark":88.81} -{"level_0":10,"index":0,"Rank":11,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":84.93,"BIOSSES":89.42,"SICK-R":81.67,"STS12":78.02,"STS13":90.1,"STS14":85.44,"STS15":89.64,"STS16":87.24,"STS17 (en-en)":90.46,"STS22 (en)":67.99,"STSBenchmark":89.33} -{"level_0":11,"index":193,"Rank":12,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.9,"BIOSSES":88.06,"SICK-R":82.05,"STS12":78.77,"STS13":90.4,"STS14":85.45,"STS15":90.01,"STS16":87.42,"STS17 (en-en)":88.8,"STS22 (en)":68.8,"STSBenchmark":89.2} -{"level_0":12,"index":219,"Rank":13,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.82,"BIOSSES":87.28,"SICK-R":82.33,"STS12":79.7,"STS13":89.21,"STS14":86.01,"STS15":89.7,"STS16":87.68,"STS17 (en-en)":88.03,"STS22 (en)":70.16,"STSBenchmark":88.15} -{"level_0":13,"index":161,"Rank":14,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.78,"BIOSSES":86.96,"SICK-R":81.73,"STS12":82.57,"STS13":87.15,"STS14":84.97,"STS15":91.05,"STS16":87.31,"STS17 (en-en)":90.03,"STS22 (en)":67.63,"STSBenchmark":88.38} -{"level_0":14,"index":156,"Rank":15,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":84.63,"BIOSSES":85.55,"SICK-R":82.64,"STS12":79.66,"STS13":88.43,"STS14":84.54,"STS15":90.43,"STS16":87.68,"STS17 (en-en)":91.75,"STS22 (en)":66.98,"STSBenchmark":88.6} -{"level_0":15,"index":117,"Rank":16,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":84.59,"BIOSSES":89.17,"SICK-R":82.8,"STS12":77.13,"STS13":89.29,"STS14":83.83,"STS15":89.7,"STS16":86.43,"STS17 (en-en)":89.66,"STS22 (en)":69.61,"STSBenchmark":88.3} -{"level_0":16,"index":111,"Rank":17,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.54,"BIOSSES":86.14,"SICK-R":82.62,"STS12":79.09,"STS13":89.62,"STS14":85.02,"STS15":89.51,"STS16":86.61,"STS17 (en-en)":88.99,"STS22 (en)":68.79,"STSBenchmark":89.06} -{"level_0":17,"index":165,"Rank":18,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.54,"BIOSSES":86.14,"SICK-R":82.62,"STS12":79.09,"STS13":89.62,"STS14":85.02,"STS15":89.51,"STS16":86.61,"STS17 (en-en)":88.99,"STS22 (en)":68.79,"STSBenchmark":89.06} -{"level_0":18,"index":108,"Rank":19,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.54,"BIOSSES":86.14,"SICK-R":82.62,"STS12":79.09,"STS13":89.62,"STS14":85.02,"STS15":89.51,"STS16":86.61,"STS17 (en-en)":88.99,"STS22 (en)":68.79,"STSBenchmark":89.06} -{"level_0":19,"index":138,"Rank":20,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.51,"BIOSSES":83.11,"SICK-R":82.89,"STS12":80.09,"STS13":89.68,"STS14":85.07,"STS15":89.39,"STS16":87.15,"STS17 (en-en)":91.35,"STS22 (en)":68.1,"STSBenchmark":88.23} -{"level_0":20,"index":6,"Rank":21,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.31,"BIOSSES":89.24,"SICK-R":83.16,"STS12":73.34,"STS13":88.49,"STS14":86.49,"STS15":91.13,"STS16":85.68,"STS17 (en-en)":90.06,"STS22 (en)":66.32,"STSBenchmark":89.22} -{"level_0":21,"index":21,"Rank":22,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.24,"BIOSSES":86.47,"SICK-R":83.87,"STS12":78.14,"STS13":86.59,"STS14":82.83,"STS15":87.77,"STS16":87.04,"STS17 (en-en)":91.25,"STS22 (en)":70.07,"STSBenchmark":88.42} -{"level_0":22,"index":139,"Rank":23,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.22,"BIOSSES":83.3,"SICK-R":82.21,"STS12":79.52,"STS13":89.19,"STS14":85.15,"STS15":89.1,"STS16":87.14,"STS17 (en-en)":90.97,"STS22 (en)":67.83,"STSBenchmark":87.74} -{"level_0":23,"index":200,"Rank":24,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.18,"BIOSSES":88.37,"SICK-R":82.06,"STS12":78.83,"STS13":87.99,"STS14":83.5,"STS15":89.0,"STS16":86.45,"STS17 (en-en)":89.56,"STS22 (en)":68.15,"STSBenchmark":87.89} -{"level_0":24,"index":64,"Rank":25,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":84.09,"BIOSSES":85.24,"SICK-R":83.7,"STS12":78.8,"STS13":86.37,"STS14":84.04,"STS15":88.99,"STS16":87.22,"STS17 (en-en)":90.19,"STS22 (en)":67.68,"STSBenchmark":88.65} -{"level_0":25,"index":23,"Rank":26,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.88,"BIOSSES":85.74,"SICK-R":82.66,"STS12":77.71,"STS13":87.45,"STS14":83.48,"STS15":87.63,"STS16":86.7,"STS17 (en-en)":91.18,"STS22 (en)":69.02,"STSBenchmark":87.25} -{"level_0":26,"index":170,"Rank":27,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":83.85,"BIOSSES":86.75,"SICK-R":82.33,"STS12":77.61,"STS13":87.95,"STS14":83.85,"STS15":88.47,"STS16":86.46,"STS17 (en-en)":88.7,"STS22 (en)":68.02,"STSBenchmark":88.33} -{"level_0":27,"index":60,"Rank":28,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":83.73,"BIOSSES":82.13,"SICK-R":83.01,"STS12":78.85,"STS13":86.84,"STS14":84.04,"STS15":88.72,"STS16":86.79,"STS17 (en-en)":90.63,"STS22 (en)":67.55,"STSBenchmark":88.72} -{"level_0":28,"index":62,"Rank":29,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":83.58,"BIOSSES":84.92,"SICK-R":83.94,"STS12":79.27,"STS13":84.83,"STS14":82.94,"STS15":88.09,"STS16":86.54,"STS17 (en-en)":89.58,"STS22 (en)":67.67,"STSBenchmark":88.05} -{"level_0":29,"index":66,"Rank":30,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":83.58,"BIOSSES":85.88,"SICK-R":82.25,"STS12":78.28,"STS13":85.52,"STS14":82.49,"STS15":88.76,"STS16":87.11,"STS17 (en-en)":90.1,"STS22 (en)":68.25,"STSBenchmark":87.16} -{"level_0":30,"index":115,"Rank":31,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.51,"BIOSSES":87.95,"SICK-R":81.29,"STS12":76.16,"STS13":87.85,"STS14":83.39,"STS15":89.43,"STS16":85.35,"STS17 (en-en)":88.59,"STS22 (en)":67.81,"STSBenchmark":87.32} -{"level_0":31,"index":176,"Rank":32,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.49,"BIOSSES":83.05,"SICK-R":83.01,"STS12":81.62,"STS13":86.82,"STS14":83.56,"STS15":88.75,"STS16":86.03,"STS17 (en-en)":88.56,"STS22 (en)":65.62,"STSBenchmark":87.84} -{"level_0":32,"index":253,"Rank":33,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.35,"BIOSSES":88.65,"SICK-R":79.81,"STS12":76.81,"STS13":88.11,"STS14":82.66,"STS15":88.93,"STS16":84.25,"STS17 (en-en)":88.47,"STS22 (en)":69.71,"STSBenchmark":86.07} -{"level_0":33,"index":186,"Rank":34,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.34,"BIOSSES":85.81,"SICK-R":81.75,"STS12":78.51,"STS13":86.62,"STS14":83.06,"STS15":88.39,"STS16":86.82,"STS17 (en-en)":87.9,"STS22 (en)":66.76,"STSBenchmark":87.77} -{"level_0":34,"index":43,"Rank":35,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.26,"BIOSSES":87.34,"SICK-R":80.56,"STS12":73.69,"STS13":85.82,"STS14":82.05,"STS15":88.8,"STS16":86.2,"STS17 (en-en)":91.46,"STS22 (en)":69.21,"STSBenchmark":87.43} -{"level_0":35,"index":36,"Rank":36,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.15,"BIOSSES":85.01,"SICK-R":82.18,"STS12":77.62,"STS13":85.16,"STS14":80.02,"STS15":88.92,"STS16":86.92,"STS17 (en-en)":90.09,"STS22 (en)":66.81,"STSBenchmark":88.79} -{"level_0":36,"index":148,"Rank":37,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":83.15,"BIOSSES":84.39,"SICK-R":81.27,"STS12":76.28,"STS13":88.18,"STS14":81.92,"STS15":89.01,"STS16":85.49,"STS17 (en-en)":90.3,"STS22 (en)":67.74,"STSBenchmark":86.88} -{"level_0":37,"index":150,"Rank":38,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":83.11,"BIOSSES":84.65,"SICK-R":81.68,"STS12":79.05,"STS13":86.37,"STS14":82.78,"STS15":88.03,"STS16":86.49,"STS17 (en-en)":87.5,"STS22 (en)":67.05,"STSBenchmark":87.52} -{"level_0":38,"index":22,"Rank":39,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":83.11,"BIOSSES":84.65,"SICK-R":81.68,"STS12":79.05,"STS13":86.37,"STS14":82.78,"STS15":88.03,"STS16":86.49,"STS17 (en-en)":87.5,"STS22 (en)":67.05,"STSBenchmark":87.52} -{"level_0":39,"index":114,"Rank":40,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.11,"BIOSSES":84.65,"SICK-R":81.68,"STS12":79.05,"STS13":86.37,"STS14":82.78,"STS15":88.03,"STS16":86.49,"STS17 (en-en)":87.5,"STS22 (en)":67.05,"STSBenchmark":87.52} -{"level_0":40,"index":149,"Rank":41,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":83.06,"BIOSSES":84.15,"SICK-R":81.7,"STS12":75.32,"STS13":87.44,"STS14":81.87,"STS15":88.94,"STS16":85.38,"STS17 (en-en)":90.54,"STS22 (en)":68.65,"STSBenchmark":86.56} -{"level_0":41,"index":205,"Rank":42,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.04,"BIOSSES":81.37,"SICK-R":79.28,"STS12":79.55,"STS13":88.83,"STS14":83.87,"STS15":88.54,"STS16":86.49,"STS17 (en-en)":88.73,"STS22 (en)":66.88,"STSBenchmark":86.85} -{"level_0":42,"index":126,"Rank":43,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.04,"BIOSSES":81.37,"SICK-R":79.28,"STS12":79.55,"STS13":88.83,"STS14":83.87,"STS15":88.54,"STS16":86.49,"STS17 (en-en)":88.73,"STS22 (en)":66.88,"STSBenchmark":86.85} -{"level_0":43,"index":17,"Rank":44,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":83.04,"BIOSSES":81.37,"SICK-R":79.28,"STS12":79.55,"STS13":88.83,"STS14":83.87,"STS15":88.54,"STS16":86.49,"STS17 (en-en)":88.73,"STS22 (en)":66.88,"STSBenchmark":86.85} -{"level_0":44,"index":118,"Rank":45,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.03,"BIOSSES":86.99,"SICK-R":80.53,"STS12":75.57,"STS13":86.26,"STS14":82.3,"STS15":88.74,"STS16":85.27,"STS17 (en-en)":89.02,"STS22 (en)":68.51,"STSBenchmark":87.08} -{"level_0":45,"index":137,"Rank":46,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.02,"BIOSSES":85.94,"SICK-R":81.06,"STS12":78.72,"STS13":84.88,"STS14":83.11,"STS15":88.74,"STS16":86.35,"STS17 (en-en)":87.71,"STS22 (en)":66.28,"STSBenchmark":87.45} -{"level_0":46,"index":151,"Rank":47,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.02,"BIOSSES":85.94,"SICK-R":81.06,"STS12":78.72,"STS13":84.88,"STS14":83.11,"STS15":88.74,"STS16":86.35,"STS17 (en-en)":87.71,"STS22 (en)":66.28,"STSBenchmark":87.45} -{"level_0":47,"index":169,"Rank":48,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.01,"BIOSSES":85.52,"SICK-R":81.41,"STS12":77.47,"STS13":86.38,"STS14":81.17,"STS15":88.23,"STS16":86.29,"STS17 (en-en)":90.62,"STS22 (en)":65.01,"STSBenchmark":88.02} -{"level_0":48,"index":119,"Rank":49,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":83.0,"BIOSSES":87.2,"SICK-R":80.31,"STS12":75.76,"STS13":86.08,"STS14":82.28,"STS15":88.9,"STS16":85.18,"STS17 (en-en)":88.73,"STS22 (en)":68.54,"STSBenchmark":86.98} -{"level_0":49,"index":125,"Rank":50,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.0,"BIOSSES":87.2,"SICK-R":80.31,"STS12":75.76,"STS13":86.08,"STS14":82.28,"STS15":88.9,"STS16":85.18,"STS17 (en-en)":88.73,"STS22 (en)":68.54,"STSBenchmark":86.98} -{"level_0":50,"index":8,"Rank":51,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.93,"BIOSSES":84.85,"SICK-R":79.71,"STS12":77.09,"STS13":88.91,"STS14":82.08,"STS15":89.21,"STS16":84.74,"STS17 (en-en)":90.73,"STS22 (en)":62.1,"STSBenchmark":89.86} -{"level_0":51,"index":268,"Rank":52,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.88,"BIOSSES":87.98,"SICK-R":79.13,"STS12":75.98,"STS13":87.55,"STS14":81.99,"STS15":88.93,"STS16":83.59,"STS17 (en-en)":88.51,"STS22 (en)":69.72,"STSBenchmark":85.4} -{"level_0":52,"index":215,"Rank":53,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":82.84,"BIOSSES":85.59,"SICK-R":82.8,"STS12":76.22,"STS13":86.3,"STS14":82.09,"STS15":87.24,"STS16":84.77,"STS17 (en-en)":87.42,"STS22 (en)":69.85,"STSBenchmark":86.14} -{"level_0":53,"index":198,"Rank":54,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.79,"BIOSSES":86.71,"SICK-R":80.06,"STS12":77.75,"STS13":86.08,"STS14":82.89,"STS15":88.54,"STS16":85.76,"STS17 (en-en)":87.82,"STS22 (en)":65.46,"STSBenchmark":86.79} -{"level_0":54,"index":204,"Rank":55,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.73,"BIOSSES":82.11,"SICK-R":80.49,"STS12":77.25,"STS13":87.79,"STS14":82.91,"STS15":88.45,"STS16":85.45,"STS17 (en-en)":90.33,"STS22 (en)":66.1,"STSBenchmark":86.38} -{"level_0":55,"index":16,"Rank":56,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.73,"BIOSSES":82.11,"SICK-R":80.49,"STS12":77.25,"STS13":87.79,"STS14":82.91,"STS15":88.45,"STS16":85.45,"STS17 (en-en)":90.33,"STS22 (en)":66.1,"STSBenchmark":86.38} -{"level_0":56,"index":246,"Rank":57,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":82.63,"BIOSSES":80.43,"SICK-R":80.47,"STS12":78.85,"STS13":88.94,"STS14":84.86,"STS15":89.32,"STS16":84.67,"STS17 (en-en)":89.46,"STS22 (en)":65.33,"STSBenchmark":84.01} -{"level_0":57,"index":34,"Rank":58,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.62,"BIOSSES":83.5,"SICK-R":81.27,"STS12":74.37,"STS13":85.2,"STS14":80.98,"STS15":89.23,"STS16":86.32,"STS17 (en-en)":90.34,"STS22 (en)":66.42,"STSBenchmark":88.55} -{"level_0":58,"index":174,"Rank":59,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.61,"BIOSSES":76.91,"SICK-R":80.33,"STS12":80.59,"STS13":86.59,"STS14":82.99,"STS15":88.79,"STS16":84.44,"STS17 (en-en)":89.31,"STS22 (en)":67.97,"STSBenchmark":88.14} -{"level_0":59,"index":178,"Rank":60,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":82.56,"BIOSSES":86.54,"SICK-R":83.23,"STS12":76.13,"STS13":83.19,"STS14":80.6,"STS15":87.16,"STS16":85.16,"STS17 (en-en)":90.88,"STS22 (en)":67.04,"STSBenchmark":85.67} -{"level_0":60,"index":201,"Rank":61,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.5,"BIOSSES":85.64,"SICK-R":80.74,"STS12":76.07,"STS13":86.81,"STS14":81.68,"STS15":88.75,"STS16":84.61,"STS17 (en-en)":89.75,"STS22 (en)":64.1,"STSBenchmark":86.88} -{"level_0":61,"index":15,"Rank":62,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":82.42,"BIOSSES":81.12,"SICK-R":79.15,"STS12":76.52,"STS13":88.63,"STS14":83.32,"STS15":87.5,"STS16":86.39,"STS17 (en-en)":87.79,"STS22 (en)":66.4,"STSBenchmark":87.35} -{"level_0":62,"index":179,"Rank":63,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"level_0":63,"index":20,"Rank":64,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"level_0":64,"index":180,"Rank":65,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"level_0":65,"index":120,"Rank":66,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"level_0":66,"index":182,"Rank":67,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"level_0":67,"index":181,"Rank":68,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.4,"BIOSSES":86.94,"SICK-R":80.3,"STS12":78.03,"STS13":84.19,"STS14":82.27,"STS15":87.96,"STS16":85.48,"STS17 (en-en)":86.42,"STS22 (en)":65.95,"STSBenchmark":86.42} -{"level_0":68,"index":167,"Rank":69,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.37,"BIOSSES":84.82,"SICK-R":80.85,"STS12":76.1,"STS13":85.49,"STS14":80.44,"STS15":87.75,"STS16":85.42,"STS17 (en-en)":89.95,"STS22 (en)":65.13,"STSBenchmark":87.71} -{"level_0":69,"index":252,"Rank":70,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.3,"BIOSSES":87.64,"SICK-R":78.86,"STS12":75.71,"STS13":85.73,"STS14":81.51,"STS15":88.81,"STS16":83.82,"STS17 (en-en)":87.9,"STS22 (en)":67.33,"STSBenchmark":85.73} -{"level_0":70,"index":147,"Rank":71,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory 
Usage (GB, fp32)":0.41,"Average":82.29,"BIOSSES":82.31,"SICK-R":80.26,"STS12":77.02,"STS13":86.58,"STS14":81.32,"STS15":88.19,"STS16":84.88,"STS17 (en-en)":89.46,"STS22 (en)":66.45,"STSBenchmark":86.43} -{"level_0":71,"index":254,"Rank":72,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.07,"BIOSSES":88.17,"SICK-R":77.93,"STS12":75.12,"STS13":85.09,"STS14":81.03,"STS15":88.32,"STS16":83.91,"STS17 (en-en)":87.59,"STS22 (en)":68.0,"STSBenchmark":85.57} -{"level_0":72,"index":28,"Rank":73,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"level_0":73,"index":26,"Rank":74,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"level_0":74,"index":206,"Rank":75,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"level_0":75,"index":129,"Rank":76,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"level_0":76,"index":29,"Rank":77,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"level_0":77,"index":27,"Rank":78,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"BIOSSES":86.47,"SICK-R":78.58,"STS12":78.92,"STS13":85.4,"STS14":81.64,"STS15":87.2,"STS16":85.44,"STS17 (en-en)":87.2,"STS22 (en)":64.22,"STSBenchmark":85.54} -{"level_0":78,"index":154,"Rank":79,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":82.06,"BIOSSES":84.73,"SICK-R":80.49,"STS12":75.93,"STS13":85.22,"STS14":80.54,"STS15":88.81,"STS16":85.28,"STS17 (en-en)":89.37,"STS22 (en)":62.99,"STSBenchmark":87.21} -{"level_0":79,"index":93,"Rank":80,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.05,"BIOSSES":83.6,"SICK-R":79.28,"STS12":76.95,"STS13":84.12,"STS14":80.46,"STS15":89.76,"STS16":85.47,"STS17 (en-en)":89.03,"STS22 (en)":64.11,"STSBenchmark":87.74} -{"level_0":80,"index":155,"Rank":81,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":82.05,"BIOSSES":83.6,"SICK-R":79.28,"STS12":76.95,"STS13":84.12,"STS14":80.46,"STS15":89.76,"STS16":85.47,"STS17 (en-en)":89.03,"STS22 (en)":64.11,"STSBenchmark":87.74} -{"level_0":81,"index":202,"Rank":82,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":82.04,"BIOSSES":86.66,"SICK-R":79.44,"STS12":76.17,"STS13":84.15,"STS14":81.49,"STS15":88.11,"STS16":84.99,"STS17 (en-en)":87.98,"STS22 (en)":65.07,"STSBenchmark":86.31} -{"level_0":82,"index":18,"Rank":83,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.97,"BIOSSES":83.65,"SICK-R":79.37,"STS12":74.38,"STS13":84.71,"STS14":80.1,"STS15":87.16,"STS16":85.02,"STS17 (en-en)":90.64,"STS22 (en)":68.63,"STSBenchmark":86.04} -{"level_0":83,"index":213,"Rank":84,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.94,"BIOSSES":84.25,"SICK-R":79.38,"STS12":78.52,"STS13":86.05,"STS14":81.54,"STS15":86.97,"STS16":84.77,"STS17 (en-en)":87.47,"STS22 (en)":65.02,"STSBenchmark":85.47} -{"level_0":84,"index":269,"Rank":85,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.9,"BIOSSES":85.83,"SICK-R":79.0,"STS12":73.48,"STS13":86.14,"STS14":80.36,"STS15":88.53,"STS16":84.08,"STS17 (en-en)":89.42,"STS22 (en)":66.75,"STSBenchmark":85.45} -{"level_0":85,"index":244,"Rank":86,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":81.83,"BIOSSES":78.93,"SICK-R":80.34,"STS12":79.11,"STS13":87.33,"STS14":83.17,"STS15":88.28,"STS16":84.36,"STS17 (en-en)":88.99,"STS22 (en)":62.39,"STSBenchmark":85.36} -{"level_0":86,"index":283,"Rank":87,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.73,"BIOSSES":84.68,"SICK-R":79.0,"STS12":72.84,"STS13":86.1,"STS14":81.15,"STS15":88.49,"STS16":85.08,"STS17 (en-en)":90.22,"STS22 (en)":66.14,"STSBenchmark":83.56} -{"level_0":87,"index":211,"Rank":88,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":81.7,"BIOSSES":83.3,"SICK-R":79.27,"STS12":78.3,"STS13":85.81,"STS14":81.38,"STS15":86.79,"STS16":84.56,"STS17 (en-en)":87.25,"STS22 (en)":65.24,"STSBenchmark":85.14} -{"level_0":88,"index":245,"Rank":89,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":81.66,"BIOSSES":73.12,"SICK-R":79.98,"STS12":79.02,"STS13":88.8,"STS14":84.33,"STS15":88.89,"STS16":85.31,"STS17 (en-en)":88.91,"STS22 (en)":64.32,"STSBenchmark":83.93} -{"level_0":89,"index":24,"Rank":90,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":81.59,"BIOSSES":83.75,"SICK-R":79.41,"STS12":77.44,"STS13":82.98,"STS14":81.84,"STS15":87.26,"STS16":84.93,"STS17 (en-en)":87.15,"STS22 (en)":65.3,"STSBenchmark":85.86} -{"level_0":90,"index":284,"Rank":91,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.58,"BIOSSES":88.72,"SICK-R":76.73,"STS12":73.09,"STS13":84.92,"STS14":79.81,"STS15":88.01,"STS16":84.41,"STS17 (en-en)":90.94,"STS22 (en)":64.96,"STSBenchmark":84.24} -{"level_0":91,"index":160,"Rank":92,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":81.56,"BIOSSES":82.51,"SICK-R":80.23,"STS12":80.02,"STS13":81.55,"STS14":77.72,"STS15":89.31,"STS16":85.78,"STS17 (en-en)":88.12,"STS22 (en)":63.06,"STSBenchmark":87.29} -{"level_0":92,"index":112,"Rank":93,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":81.54,"BIOSSES":87.13,"SICK-R":76.49,"STS12":75.0,"STS13":87.91,"STS14":82.26,"STS15":87.87,"STS16":80.92,"STS17 (en-en)":87.23,"STS22 (en)":68.59,"STSBenchmark":81.95} -{"level_0":93,"index":166,"Rank":94,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.52,"BIOSSES":84.65,"SICK-R":79.92,"STS12":75.8,"STS13":83.62,"STS14":78.73,"STS15":86.41,"STS16":84.5,"STS17 (en-en)":89.95,"STS22 (en)":65.81,"STSBenchmark":85.79} -{"level_0":94,"index":270,"Rank":95,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.5,"BIOSSES":87.81,"SICK-R":77.31,"STS12":73.99,"STS13":84.23,"STS14":80.09,"STS15":88.2,"STS16":83.61,"STS17 (en-en)":87.8,"STS22 (en)":66.84,"STSBenchmark":85.13} -{"level_0":95,"index":19,"Rank":96,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.43,"BIOSSES":85.39,"SICK-R":79.78,"STS12":73.88,"STS13":85.08,"STS14":79.61,"STS15":86.15,"STS16":81.6,"STS17 (en-en)":89.11,"STS22 (en)":70.59,"STSBenchmark":83.07} -{"level_0":96,"index":210,"Rank":97,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":81.34,"BIOSSES":81.58,"SICK-R":79.24,"STS12":78.16,"STS13":86.01,"STS14":81.25,"STS15":86.51,"STS16":84.24,"STS17 (en-en)":86.44,"STS22 (en)":65.14,"STSBenchmark":84.8} -{"level_0":97,"index":95,"Rank":98,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.26,"BIOSSES":87.6,"SICK-R":77.01,"STS12":75.67,"STS13":82.4,"STS14":79.93,"STS15":85.82,"STS16":84.5,"STS17 (en-en)":88.93,"STS22 (en)":67.1,"STSBenchmark":83.6} -{"level_0":98,"index":199,"Rank":99,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.19,"BIOSSES":83.94,"SICK-R":78.64,"STS12":74.88,"STS13":83.64,"STS14":80.4,"STS15":88.01,"STS16":84.31,"STS17 (en-en)":88.3,"STS22 (en)":63.84,"STSBenchmark":85.93} -{"level_0":99,"index":243,"Rank":100,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.14,"BIOSSES":75.89,"SICK-R":80.18,"STS12":78.05,"STS13":85.85,"STS14":82.19,"STS15":87.46,"STS16":84.03,"STS17 (en-en)":89.57,"STS22 (en)":62.66,"STSBenchmark":85.52} -{"level_0":100,"index":140,"Rank":101,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.05,"BIOSSES":81.4,"SICK-R":78.3,"STS12":75.79,"STS13":83.58,"STS14":79.95,"STS15":88.82,"STS16":84.46,"STS17 (en-en)":87.58,"STS22 (en)":64.07,"STSBenchmark":86.52} -{"level_0":101,"index":153,"Rank":102,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":81.05,"BIOSSES":81.4,"SICK-R":78.3,"STS12":75.79,"STS13":83.58,"STS14":79.95,"STS15":88.82,"STS16":84.46,"STS17 (en-en)":87.58,"STS22 (en)":64.07,"STSBenchmark":86.52} -{"level_0":102,"index":282,"Rank":103,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.04,"BIOSSES":84.87,"SICK-R":79.18,"STS12":71.98,"STS13":85.52,"STS14":80.5,"STS15":87.51,"STS16":84.48,"STS17 (en-en)":88.11,"STS22 (en)":65.92,"STSBenchmark":82.34} -{"level_0":103,"index":281,"Rank":104,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":80.97,"BIOSSES":86.35,"SICK-R":80.6,"STS12":69.8,"STS13":83.27,"STS14":76.09,"STS15":86.12,"STS16":85.96,"STS17 (en-en)":90.25,"STS22 (en)":68.12,"STSBenchmark":83.17} -{"level_0":104,"index":152,"Rank":105,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":80.96,"BIOSSES":85.1,"SICK-R":79.66,"STS12":74.22,"STS13":83.31,"STS14":78.52,"STS15":88.35,"STS16":84.15,"STS17 (en-en)":87.23,"STS22 (en)":62.88,"STSBenchmark":86.18} -{"level_0":105,"index":172,"Rank":106,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.96,"BIOSSES":84.43,"SICK-R":79.2,"STS12":74.52,"STS13":83.16,"STS14":78.09,"STS15":86.91,"STS16":83.65,"STS17 (en-en)":90.16,"STS22 (en)":64.88,"STSBenchmark":84.6} -{"level_0":106,"index":33,"Rank":107,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.92,"BIOSSES":81.72,"SICK-R":79.65,"STS12":72.6,"STS13":82.88,"STS14":79.74,"STS15":86.98,"STS16":84.41,"STS17 (en-en)":89.22,"STS22 (en)":65.42,"STSBenchmark":86.58} -{"level_0":107,"index":157,"Rank":108,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":80.87,"BIOSSES":84.22,"SICK-R":78.9,"STS12":75.19,"STS13":81.8,"STS14":78.48,"STS15":87.49,"STS16":84.58,"STS17 (en-en)":87.94,"STS22 (en)":63.76,"STSBenchmark":86.36} -{"level_0":108,"index":208,"Rank":109,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.84,"BIOSSES":87.19,"SICK-R":74.19,"STS12":73.22,"STS13":84.77,"STS14":79.99,"STS15":87.27,"STS16":82.31,"STS17 (en-en)":89.54,"STS22 (en)":65.93,"STSBenchmark":83.96} -{"level_0":109,"index":209,"Rank":110,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":80.74,"BIOSSES":80.19,"SICK-R":79.09,"STS12":77.49,"STS13":85.62,"STS14":80.5,"STS15":85.84,"STS16":83.9,"STS17 (en-en)":86.27,"STS22 (en)":64.24,"STSBenchmark":84.28} -{"level_0":110,"index":116,"Rank":111,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.72,"BIOSSES":81.26,"SICK-R":79.09,"STS12":75.04,"STS13":83.26,"STS14":78.62,"STS15":87.03,"STS16":83.01,"STS17 (en-en)":87.36,"STS22 (en)":68.11,"STSBenchmark":84.4} -{"level_0":111,"index":135,"Rank":112,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.7,"BIOSSES":81.23,"SICK-R":79.65,"STS12":74.27,"STS13":84.18,"STS14":78.81,"STS15":87.55,"STS16":85.35,"STS17 (en-en)":88.88,"STS22 (en)":62.2,"STSBenchmark":84.84} -{"level_0":112,"index":175,"Rank":113,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":80.7,"BIOSSES":81.23,"SICK-R":79.65,"STS12":74.27,"STS13":84.18,"STS14":78.81,"STS15":87.55,"STS16":85.35,"STS17 (en-en)":88.88,"STS22 (en)":62.2,"STSBenchmark":84.84} -{"level_0":113,"index":267,"Rank":114,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.59,"BIOSSES":82.19,"SICK-R":79.25,"STS12":73.71,"STS13":82.32,"STS14":80.07,"STS15":87.21,"STS16":82.33,"STS17 (en-en)":87.58,"STS22 (en)":66.3,"STSBenchmark":84.92} -{"level_0":114,"index":84,"Rank":115,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Average":80.53,"BIOSSES":79.5,"SICK-R":79.59,"STS12":74.29,"STS13":85.35,"STS14":79.21,"STS15":85.52,"STS16":82.54,"STS17 (en-en)":90.44,"STS22 (en)":63.2,"STSBenchmark":85.67} -{"level_0":115,"index":185,"Rank":116,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.47,"BIOSSES":80.72,"SICK-R":81.87,"STS12":70.75,"STS13":84.37,"STS14":74.51,"STS15":86.41,"STS16":84.74,"STS17 (en-en)":89.03,"STS22 (en)":65.87,"STSBenchmark":86.46} -{"level_0":116,"index":107,"Rank":117,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.46,"BIOSSES":86.63,"SICK-R":75.85,"STS12":72.58,"STS13":82.39,"STS14":77.98,"STS15":86.54,"STS16":83.31,"STS17 (en-en)":88.28,"STS22 (en)":66.68,"STSBenchmark":84.38} -{"level_0":117,"index":158,"Rank":118,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.39,"BIOSSES":79.43,"SICK-R":78.51,"STS12":76.21,"STS13":82.4,"STS14":79.0,"STS15":87.76,"STS16":83.8,"STS17 (en-en)":87.72,"STS22 (en)":63.15,"STSBenchmark":85.95} -{"level_0":118,"index":230,"Rank":119,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":80.32,"BIOSSES":80.43,"SICK-R":80.59,"STS12":72.63,"STS13":83.48,"STS14":78.0,"STS15":85.66,"STS16":80.03,"STS17 (en-en)":90.6,"STS22 (en)":68.39,"STSBenchmark":83.42} -{"level_0":119,"index":159,"Rank":120,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.26,"BIOSSES":85.06,"SICK-R":78.51,"STS12":76.7,"STS13":78.03,"STS14":76.6,"STS15":88.16,"STS16":84.28,"STS17 (en-en)":87.83,"STS22 (en)":61.83,"STSBenchmark":85.64} -{"level_0":120,"index":248,"Rank":121,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.24,"BIOSSES":66.16,"SICK-R":79.97,"STS12":80.91,"STS13":82.86,"STS14":87.36,"STS15":88.31,"STS16":81.61,"STS17 (en-en)":85.81,"STS22 (en)":62.99,"STSBenchmark":86.45} -{"level_0":121,"index":207,"Rank":122,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.22,"BIOSSES":85.37,"SICK-R":77.3,"STS12":77.97,"STS13":79.39,"STS14":78.17,"STS15":85.51,"STS16":84.95,"STS17 (en-en)":85.87,"STS22 (en)":64.28,"STSBenchmark":83.42} -{"level_0":122,"index":132,"Rank":123,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.17,"BIOSSES":75.32,"SICK-R":80.91,"STS12":76.98,"STS13":82.63,"STS14":77.84,"STS15":85.92,"STS16":80.68,"STS17 (en-en)":88.99,"STS22 (en)":68.26,"STSBenchmark":84.2} -{"level_0":123,"index":35,"Rank":124,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.09,"BIOSSES":84.01,"SICK-R":77.04,"STS12":73.22,"STS13":79.5,"STS14":76.96,"STS15":86.43,"STS16":84.33,"STS17 (en-en)":88.67,"STS22 (en)":65.31,"STSBenchmark":85.46} -{"level_0":124,"index":136,"Rank":125,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.0,"BIOSSES":80.52,"SICK-R":76.72,"STS12":73.66,"STS13":83.3,"STS14":79.17,"STS15":87.3,"STS16":83.6,"STS17 (en-en)":88.23,"STS22 (en)":63.46,"STSBenchmark":84.04} -{"level_0":125,"index":177,"Rank":126,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":80.0,"BIOSSES":80.52,"SICK-R":76.72,"STS12":73.66,"STS13":83.3,"STS14":79.17,"STS15":87.3,"STS16":83.6,"STS17 (en-en)":88.23,"STS22 (en)":63.46,"STSBenchmark":84.04} -{"level_0":126,"index":168,"Rank":127,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.93,"BIOSSES":84.44,"SICK-R":78.53,"STS12":74.07,"STS13":81.81,"STS14":77.05,"STS15":84.61,"STS16":83.09,"STS17 (en-en)":89.15,"STS22 (en)":62.92,"STSBenchmark":83.63} -{"level_0":127,"index":171,"Rank":128,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.93,"BIOSSES":83.58,"SICK-R":79.14,"STS12":75.06,"STS13":80.86,"STS14":76.13,"STS15":85.55,"STS16":81.21,"STS17 (en-en)":88.98,"STS22 (en)":66.22,"STSBenchmark":82.57} -{"level_0":128,"index":69,"Rank":129,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.89,"BIOSSES":87.32,"SICK-R":75.63,"STS12":68.58,"STS13":80.54,"STS14":77.63,"STS15":86.16,"STS16":82.82,"STS17 (en-en)":88.57,"STS22 (en)":67.39,"STSBenchmark":84.25} -{"level_0":129,"index":228,"Rank":130,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":79.84,"BIOSSES":83.57,"SICK-R":79.32,"STS12":73.08,"STS13":82.13,"STS14":76.73,"STS15":85.58,"STS16":80.23,"STS17 (en-en)":88.63,"STS22 (en)":66.0,"STSBenchmark":83.09} -{"level_0":130,"index":212,"Rank":131,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":79.79,"BIOSSES":77.18,"SICK-R":78.76,"STS12":77.3,"STS13":84.18,"STS14":79.37,"STS15":84.69,"STS16":83.36,"STS17 (en-en)":85.73,"STS22 (en)":63.83,"STSBenchmark":83.46} -{"level_0":131,"index":271,"Rank":132,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.66,"BIOSSES":70.56,"SICK-R":79.04,"STS12":78.39,"STS13":83.72,"STS14":79.19,"STS15":85.58,"STS16":82.33,"STS17 (en-en)":87.9,"STS22 (en)":63.92,"STSBenchmark":85.99} -{"level_0":132,"index":42,"Rank":133,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":79.54,"BIOSSES":85.01,"SICK-R":81.47,"STS12":65.84,"STS13":78.37,"STS14":77.52,"STS15":85.43,"STS16":79.94,"STS17 (en-en)":90.12,"STS22 (en)":68.59,"STSBenchmark":83.1} -{"level_0":133,"index":68,"Rank":134,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.18,"BIOSSES":85.05,"SICK-R":76.04,"STS12":69.5,"STS13":80.96,"STS14":77.08,"STS15":85.42,"STS16":82.3,"STS17 (en-en)":88.03,"STS22 (en)":64.12,"STSBenchmark":83.34} -{"level_0":134,"index":217,"Rank":135,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":79.12,"BIOSSES":68.38,"SICK-R":80.77,"STS12":75.3,"STS13":84.67,"STS14":80.19,"STS15":85.4,"STS16":80.82,"STS17 (en-en)":89.44,"STS22 (en)":61.96,"STSBenchmark":84.25} -{"level_0":135,"index":183,"Rank":136,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.11,"BIOSSES":83.86,"SICK-R":79.38,"STS12":73.65,"STS13":75.57,"STS14":72.1,"STS15":85.63,"STS16":82.07,"STS17 (en-en)":89.0,"STS22 (en)":66.16,"STSBenchmark":83.69} -{"level_0":136,"index":162,"Rank":137,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":79.1,"BIOSSES":82.26,"SICK-R":77.51,"STS12":76.56,"STS13":76.97,"STS14":75.52,"STS15":87.12,"STS16":83.63,"STS17 (en-en)":86.44,"STS22 (en)":60.94,"STSBenchmark":84.01} -{"level_0":137,"index":184,"Rank":138,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.02,"BIOSSES":80.25,"SICK-R":79.68,"STS12":69.94,"STS13":79.63,"STS14":71.34,"STS15":82.81,"STS16":82.65,"STS17 (en-en)":88.89,"STS22 (en)":68.48,"STSBenchmark":86.5} -{"level_0":138,"index":229,"Rank":139,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":78.9,"BIOSSES":81.64,"SICK-R":77.58,"STS12":72.37,"STS13":80.6,"STS14":75.59,"STS15":85.39,"STS16":78.99,"STS17 (en-en)":87.59,"STS22 (en)":67.21,"STSBenchmark":82.03} -{"level_0":139,"index":72,"Rank":140,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.81,"BIOSSES":80.78,"SICK-R":78.24,"STS12":75.19,"STS13":79.33,"STS14":76.56,"STS15":84.7,"STS16":81.44,"STS17 (en-en)":86.61,"STS22 (en)":63.43,"STSBenchmark":81.79} -{"level_0":140,"index":106,"Rank":141,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.65,"BIOSSES":82.92,"SICK-R":73.62,"STS12":71.89,"STS13":79.85,"STS14":76.86,"STS15":84.77,"STS16":81.91,"STS17 (en-en)":86.82,"STS22 (en)":65.38,"STSBenchmark":82.5} -{"level_0":141,"index":277,"Rank":142,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.6,"BIOSSES":78.04,"SICK-R":77.48,"STS12":72.3,"STS13":81.49,"STS14":74.74,"STS15":84.28,"STS16":82.06,"STS17 (en-en)":87.08,"STS22 (en)":64.71,"STSBenchmark":83.78} -{"level_0":142,"index":113,"Rank":143,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.59,"BIOSSES":81.6,"SICK-R":74.18,"STS12":72.2,"STS13":80.54,"STS14":76.2,"STS15":85.2,"STS16":81.93,"STS17 (en-en)":86.61,"STS22 (en)":65.46,"STSBenchmark":81.94} -{"level_0":143,"index":203,"Rank":144,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.53,"BIOSSES":78.95,"SICK-R":74.39,"STS12":72.22,"STS13":81.0,"STS14":76.99,"STS15":85.21,"STS16":82.83,"STS17 (en-en)":87.78,"STS22 (en)":64.17,"STSBenchmark":81.77} -{"level_0":144,"index":65,"Rank":145,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":78.5,"BIOSSES":83.29,"SICK-R":75.55,"STS12":67.65,"STS13":83.9,"STS14":76.97,"STS15":83.8,"STS16":81.91,"STS17 (en-en)":85.58,"STS22 (en)":65.93,"STSBenchmark":80.42} -{"level_0":145,"index":103,"Rank":146,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.44,"BIOSSES":86.27,"SICK-R":69.66,"STS12":68.79,"STS13":79.62,"STS14":75.58,"STS15":84.64,"STS16":82.4,"STS17 (en-en)":86.73,"STS22 (en)":69.49,"STSBenchmark":81.19} -{"level_0":146,"index":238,"Rank":147,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.38,"BIOSSES":81.91,"SICK-R":74.29,"STS12":70.12,"STS13":82.72,"STS14":78.24,"STS15":86.26,"STS16":81.61,"STS17 (en-en)":85.18,"STS22 (en)":65.76,"STSBenchmark":77.73} -{"level_0":147,"index":105,"Rank":148,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":78.37,"BIOSSES":83.42,"SICK-R":72.39,"STS12":71.9,"STS13":80.93,"STS14":76.6,"STS15":84.92,"STS16":80.72,"STS17 (en-en)":85.61,"STS22 (en)":65.9,"STSBenchmark":81.32} -{"level_0":148,"index":236,"Rank":149,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":78.19,"BIOSSES":84.86,"SICK-R":73.39,"STS12":70.33,"STS13":82.19,"STS14":77.16,"STS15":86.31,"STS16":81.85,"STS17 (en-en)":83.93,"STS22 (en)":64.3,"STSBenchmark":77.6} -{"level_0":149,"index":83,"Rank":150,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.1,"BIOSSES":86.25,"SICK-R":69.63,"STS12":67.5,"STS13":79.16,"STS14":74.46,"STS15":84.47,"STS16":80.96,"STS17 (en-en)":87.78,"STS22 (en)":69.35,"STSBenchmark":81.39} -{"level_0":150,"index":173,"Rank":151,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.06,"BIOSSES":82.96,"SICK-R":76.33,"STS12":74.28,"STS13":78.55,"STS14":73.84,"STS15":83.71,"STS16":80.03,"STS17 (en-en)":87.49,"STS22 (en)":64.25,"STSBenchmark":79.2} -{"level_0":151,"index":237,"Rank":152,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.8,"BIOSSES":78.94,"SICK-R":73.63,"STS12":69.11,"STS13":81.82,"STS14":77.07,"STS15":86.01,"STS16":82.23,"STS17 (en-en)":84.9,"STS22 (en)":66.61,"STSBenchmark":77.65} -{"level_0":152,"index":123,"Rank":153,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.74,"BIOSSES":85.31,"SICK-R":69.82,"STS12":69.66,"STS13":79.67,"STS14":74.61,"STS15":83.81,"STS16":80.4,"STS17 (en-en)":87.07,"STS22 (en)":66.13,"STSBenchmark":80.9} -{"level_0":153,"index":101,"Rank":154,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.08,"BIOSSES":88.24,"SICK-R":70.91,"STS12":66.11,"STS13":79.82,"STS14":73.64,"STS15":83.23,"STS16":81.58,"STS17 (en-en)":80.59,"STS22 (en)":68.79,"STSBenchmark":77.9} -{"level_0":154,"index":235,"Rank":155,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":77.07,"BIOSSES":79.0,"SICK-R":71.45,"STS12":68.59,"STS13":79.09,"STS14":74.64,"STS15":84.85,"STS16":81.57,"STS17 (en-en)":85.8,"STS22 (en)":66.17,"STSBenchmark":79.58} -{"level_0":155,"index":285,"Rank":156,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.95,"BIOSSES":83.79,"SICK-R":68.78,"STS12":64.81,"STS13":80.1,"STS14":74.96,"STS15":83.7,"STS16":80.55,"STS17 (en-en)":85.74,"STS22 (en)":67.5,"STSBenchmark":79.54} -{"level_0":156,"index":82,"Rank":157,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.83,"BIOSSES":84.84,"SICK-R":68.2,"STS12":66.99,"STS13":77.58,"STS14":72.78,"STS15":82.62,"STS16":80.1,"STS17 (en-en)":87.25,"STS22 (en)":68.75,"STSBenchmark":79.21} -{"level_0":157,"index":77,"Rank":158,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.56,"BIOSSES":80.94,"SICK-R":69.07,"STS12":71.78,"STS13":77.7,"STS14":74.04,"STS15":83.13,"STS16":78.88,"STS17 (en-en)":85.55,"STS22 (en)":65.55,"STSBenchmark":78.93} -{"level_0":158,"index":214,"Rank":159,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":76.51,"BIOSSES":83.32,"SICK-R":70.2,"STS12":64.34,"STS13":80.03,"STS14":74.51,"STS15":83.3,"STS16":79.67,"STS17 (en-en)":86.32,"STS22 (en)":64.64,"STSBenchmark":78.81} -{"level_0":159,"index":239,"Rank":160,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.47,"BIOSSES":77.32,"SICK-R":72.0,"STS12":68.19,"STS13":80.4,"STS14":74.02,"STS15":82.57,"STS16":79.78,"STS17 (en-en)":85.94,"STS22 (en)":67.54,"STSBenchmark":76.97} -{"level_0":160,"index":61,"Rank":161,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":76.4,"BIOSSES":82.41,"SICK-R":71.77,"STS12":65.39,"STS13":79.26,"STS14":72.98,"STS15":82.72,"STS16":81.02,"STS17 (en-en)":86.7,"STS22 (en)":63.47,"STSBenchmark":78.32} -{"level_0":161,"index":104,"Rank":162,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.39,"BIOSSES":84.05,"SICK-R":69.26,"STS12":65.9,"STS13":77.87,"STS14":72.82,"STS15":83.49,"STS16":80.58,"STS17 (en-en)":84.49,"STS22 (en)":66.28,"STSBenchmark":79.18} -{"level_0":162,"index":70,"Rank":163,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.9,"BIOSSES":71.78,"SICK-R":77.34,"STS12":70.15,"STS13":78.42,"STS14":74.76,"STS15":82.0,"STS16":78.27,"STS17 (en-en)":85.85,"STS22 (en)":61.2,"STSBenchmark":79.21} -{"level_0":163,"index":63,"Rank":164,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":75.86,"BIOSSES":84.67,"SICK-R":72.16,"STS12":61.6,"STS13":79.71,"STS14":72.11,"STS15":82.18,"STS16":79.41,"STS17 (en-en)":85.44,"STS22 (en)":63.9,"STSBenchmark":77.44} -{"level_0":164,"index":79,"Rank":165,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.74,"BIOSSES":83.02,"SICK-R":67.23,"STS12":66.59,"STS13":77.33,"STS14":71.83,"STS15":80.66,"STS16":78.91,"STS17 (en-en)":86.99,"STS22 (en)":67.3,"STSBenchmark":77.59} -{"level_0":165,"index":234,"Rank":166,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":75.6,"BIOSSES":78.34,"SICK-R":75.25,"STS12":72.96,"STS13":70.58,"STS14":70.29,"STS15":81.94,"STS16":76.8,"STS17 (en-en)":86.19,"STS22 (en)":62.88,"STSBenchmark":80.75} -{"level_0":166,"index":121,"Rank":167,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.48,"BIOSSES":86.35,"SICK-R":69.32,"STS12":67.85,"STS13":77.49,"STS14":69.77,"STS15":80.16,"STS16":77.94,"STS17 (en-en)":82.28,"STS22 (en)":67.97,"STSBenchmark":75.68} -{"level_0":167,"index":99,"Rank":168,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.48,"BIOSSES":86.35,"SICK-R":69.32,"STS12":67.85,"STS13":77.49,"STS14":69.77,"STS15":80.16,"STS16":77.94,"STS17 (en-en)":82.28,"STS22 (en)":67.97,"STSBenchmark":75.68} -{"level_0":168,"index":259,"Rank":169,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.39,"BIOSSES":78.19,"SICK-R":74.43,"STS12":72.58,"STS13":72.22,"STS14":69.98,"STS15":82.22,"STS16":76.91,"STS17 (en-en)":85.22,"STS22 (en)":61.9,"STSBenchmark":80.28} 
-{"level_0":169,"index":78,"Rank":170,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.37,"BIOSSES":76.49,"SICK-R":71.7,"STS12":69.73,"STS13":76.43,"STS14":73.66,"STS15":82.62,"STS16":79.49,"STS17 (en-en)":84.38,"STS22 (en)":60.61,"STSBenchmark":78.61} -{"level_0":170,"index":260,"Rank":171,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.35,"BIOSSES":79.14,"SICK-R":76.43,"STS12":74.25,"STS13":71.82,"STS14":71.38,"STS15":82.47,"STS16":77.54,"STS17 (en-en)":86.38,"STS22 (en)":52.85,"STSBenchmark":81.24} -{"level_0":171,"index":59,"Rank":172,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.11,"BIOSSES":65.29,"SICK-R":76.01,"STS12":71.25,"STS13":78.4,"STS14":74.23,"STS15":81.41,"STS16":79.13,"STS17 (en-en)":85.4,"STS22 (en)":58.63,"STSBenchmark":81.34} -{"level_0":172,"index":100,"Rank":173,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.03,"BIOSSES":86.62,"SICK-R":69.12,"STS12":66.97,"STS13":79.12,"STS14":68.51,"STS15":79.92,"STS16":78.66,"STS17 (en-en)":81.46,"STS22 (en)":65.84,"STSBenchmark":74.1} -{"level_0":173,"index":81,"Rank":174,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.71,"BIOSSES":70.93,"SICK-R":74.57,"STS12":69.17,"STS13":77.23,"STS14":70.99,"STS15":79.74,"STS16":77.93,"STS17 (en-en)":87.33,"STS22 (en)":59.64,"STSBenchmark":79.54} -{"level_0":174,"index":218,"Rank":175,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.33,"BIOSSES":72.31,"SICK-R":72.24,"STS12":66.05,"STS13":81.49,"STS14":73.61,"STS15":79.72,"STS16":78.12,"STS17 (en-en)":83.58,"STS22 (en)":59.65,"STSBenchmark":76.52} -{"level_0":175,"index":134,"Rank":176,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":74.23,"BIOSSES":79.76,"SICK-R":70.04,"STS12":64.38,"STS13":76.41,"STS14":69.33,"STS15":80.18,"STS16":79.57,"STS17 (en-en)":81.18,"STS22 (en)":65.51,"STSBenchmark":75.98} -{"level_0":176,"index":258,"Rank":177,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.05,"BIOSSES":69.36,"SICK-R":76.84,"STS12":72.49,"STS13":73.65,"STS14":71.88,"STS15":82.9,"STS16":79.83,"STS17 (en-en)":85.19,"STS22 (en)":48.44,"STSBenchmark":79.93} -{"level_0":177,"index":44,"Rank":178,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.7,"BIOSSES":70.87,"SICK-R":70.47,"STS12":64.0,"STS13":78.2,"STS14":70.78,"STS15":80.03,"STS16":78.8,"STS17 (en-en)":83.88,"STS22 (en)":64.39,"STSBenchmark":75.57} -{"level_0":178,"index":80,"Rank":179,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.41,"BIOSSES":75.21,"SICK-R":65.93,"STS12":66.53,"STS13":76.17,"STS14":69.05,"STS15":79.24,"STS16":76.07,"STS17 (en-en)":84.95,"STS22 (en)":65.66,"STSBenchmark":75.34} -{"level_0":179,"index":71,"Rank":180,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":72.43,"BIOSSES":77.59,"SICK-R":74.68,"STS12":54.35,"STS13":74.24,"STS14":69.99,"STS15":75.74,"STS16":73.65,"STS17 (en-en)":84.81,"STS22 (en)":62.56,"STSBenchmark":76.72} -{"level_0":180,"index":257,"Rank":181,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.04,"BIOSSES":63.38,"SICK-R":69.79,"STS12":67.06,"STS13":71.54,"STS14":70.59,"STS15":80.27,"STS16":75.76,"STS17 (en-en)":84.94,"STS22 (en)":60.0,"STSBenchmark":77.08} -{"level_0":181,"index":67,"Rank":182,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":71.61,"BIOSSES":75.12,"SICK-R":69.34,"STS12":60.09,"STS13":72.52,"STS14":66.7,"STS15":77.69,"STS16":75.94,"STS17 (en-en)":81.67,"STS22 (en)":63.7,"STSBenchmark":73.36} -{"level_0":182,"index":127,"Rank":183,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.81,"BIOSSES":73.97,"SICK-R":68.99,"STS12":58.5,"STS13":74.03,"STS14":66.18,"STS15":75.55,"STS16":73.71,"STS17 (en-en)":80.14,"STS22 (en)":65.65,"STSBenchmark":71.4} -{"level_0":183,"index":227,"Rank":184,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":70.8,"BIOSSES":78.7,"SICK-R":69.99,"STS12":65.08,"STS13":67.98,"STS14":64.03,"STS15":76.59,"STS16":72.98,"STS17 (en-en)":79.45,"STS22 (en)":60.97,"STSBenchmark":72.25} -{"level_0":184,"index":141,"Rank":185,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.23,"BIOSSES":79.11,"SICK-R":62.94,"STS12":65.46,"STS13":62.79,"STS14":57.54,"STS15":74.25,"STS16":75.73,"STS17 (en-en)":79.94,"STS22 (en)":47.12,"STSBenchmark":67.39} -{"level_0":185,"index":256,"Rank":186,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.8,"BIOSSES":55.2,"SICK-R":63.19,"STS12":54.06,"STS13":66.29,"STS14":65.84,"STS15":78.0,"STS16":70.64,"STS17 (en-en)":82.23,"STS22 (en)":54.53,"STSBenchmark":68.04} -{"level_0":186,"index":11,"Rank":187,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":63.27,"BIOSSES":62.01,"SICK-R":62.86,"STS12":62.6,"STS13":59.62,"STS14":57.03,"STS15":71.57,"STS16":70.75,"STS17 (en-en)":76.73,"STS22 (en)":39.76,"STSBenchmark":69.77} -{"level_0":187,"index":233,"Rank":188,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":62.46,"BIOSSES":50.25,"SICK-R":55.49,"STS12":53.51,"STS13":70.8,"STS14":63.56,"STS15":74.08,"STS16":64.6,"STS17 (en-en)":76.91,"STS22 (en)":53.89,"STSBenchmark":61.55} -{"level_0":188,"index":232,"Rank":189,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":61.85,"BIOSSES":44.93,"SICK-R":55.43,"STS12":54.64,"STS13":69.16,"STS14":60.81,"STS15":72.31,"STS16":65.34,"STS17 (en-en)":77.95,"STS22 (en)":56.35,"STSBenchmark":61.54} -{"level_0":189,"index":231,"Rank":190,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":61.02,"BIOSSES":64.95,"SICK-R":56.39,"STS12":62.49,"STS13":58.7,"STS14":54.87,"STS15":62.54,"STS16":64.27,"STS17 (en-en)":69.63,"STS22 (en)":55.06,"STSBenchmark":61.26} -{"level_0":190,"index":122,"Rank":191,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":54.36,"BIOSSES":54.7,"SICK-R":58.65,"STS12":30.87,"STS13":59.89,"STS14":47.73,"STS15":60.29,"STS16":63.73,"STS17 (en-en)":64.1,"STS22 (en)":56.37,"STSBenchmark":47.29} -{"level_0":191,"index":255,"Rank":192,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.1,"BIOSSES":21.34,"SICK-R":48.55,"STS12":55.59,"STS13":18.36,"STS14":28.84,"STS15":29.19,"STS16":39.05,"STS17 (en-en)":61.22,"STS22 (en)":44.45,"STSBenchmark":44.39} -{"level_0":192,"index":2,"Rank":193,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":84.17,"SICK-R":73.05,"STS12":66.59,"STS13":83.24,"STS14":73.71,"STS15":82.4,"STS16":"","STS17 (en-en)":80.9,"STS22 (en)":"","STSBenchmark":74.85} -{"level_0":193,"index":38,"Rank":207,"Model":"STS-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":"","STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":0.98} -{"level_0":194,"index":94,"Rank":234,"Model":"bge_m3e_stella<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":"","STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":61.83} -{"level_0":195,"index":97,"Rank":235,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":67.85,"SICK-R":57.32,"STS12":42.8,"STS13":58.77,"STS14":53.36,"STS15":69.23,"STS16":58.81,"STS17 (en-en)":68.6,"STS22 (en)":"","STSBenchmark":52.67} -{"level_0":196,"index":98,"Rank":236,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":79.98,"STS12":77.64,"STS13":81.23,"STS14":77.94,"STS15":86.87,"STS16":83.31,"STS17 (en-en)":87.35,"STS22 (en)":60.99,"STSBenchmark":""} -{"level_0":197,"index":124,"Rank":240,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","BIOSSES":83.38,"SICK-R":79.71,"STS12":78.73,"STS13":79.6,"STS14":79.0,"STS15":87.81,"STS16":85.4,"STS17 (en-en)":87.13,"STS22 (en)":"","STSBenchmark":84.85} -{"level_0":198,"index":241,"Rank":268,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","BIOSSES":74.18,"SICK-R":79.61,"STS12":76.02,"STS13":80.7,"STS14":78.85,"STS15":85.84,"STS16":81.05,"STS17 (en-en)":86.87,"STS22 (en)":"","STSBenchmark":84.42} -{"level_0":199,"index":242,"Rank":269,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","BIOSSES":76.27,"SICK-R":79.62,"STS12":77.9,"STS13":85.11,"STS14":80.81,"STS15":87.48,"STS16":83.2,"STS17 (en-en)":86.99,"STS22 (en)":"","STSBenchmark":86.82} -{"level_0":200,"index":247,"Rank":270,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":"","SICK-R":"","STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":47.06,"STSBenchmark":""} -{"level_0":201,"index":262,"Rank":274,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":84.75,"SICK-R":80.74,"STS12":75.42,"STS13":85.48,"STS14":78.85,"STS15":85.23,"STS16":82.16,"STS17 (en-en)":"","STS22 
(en)":"","STSBenchmark":82.69} -{"level_0":202,"index":263,"Rank":275,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":0.74,"SICK-R":0.81,"STS12":0.74,"STS13":0.83,"STS14":0.75,"STS15":0.82,"STS16":0.81,"STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":0.8} -{"level_0":203,"index":278,"Rank":284,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":78.12,"SICK-R":77.02,"STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":84.32} -{"level_0":204,"index":279,"Rank":285,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":77.46,"SICK-R":77.26,"STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":83.02} -{"level_0":205,"index":280,"Rank":286,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","BIOSSES":68.95,"SICK-R":78.72,"STS12":"","STS13":"","STS14":"","STS15":"","STS16":"","STS17 (en-en)":"","STS22 (en)":"","STSBenchmark":84.08} +{"Rank":1,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":86.75,"BIOSSES":89.7,"SICK-R":78.44,"STS12":86.46,"STS13":87.76,"STS14":86.6,"STS15":90.1,"STS16":86.39,"STSBenchmark":88.56} +{"Rank":2,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":86.36,"BIOSSES":89.42,"SICK-R":81.67,"STS12":78.02,"STS13":90.1,"STS14":85.44,"STS15":89.64,"STS16":87.24,"STSBenchmark":89.33} +{"Rank":3,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":86.32,"BIOSSES":89.46,"SICK-R":81.93,"STS12":77.59,"STS13":90.36,"STS14":85.25,"STS15":89.66,"STS16":87.34,"STSBenchmark":88.99} +{"Rank":4,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.84,"BIOSSES":89.24,"SICK-R":83.16,"STS12":73.34,"STS13":88.49,"STS14":86.49,"STS15":91.13,"STS16":85.68,"STSBenchmark":89.22} +{"Rank":5,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":85.38,"BIOSSES":85.24,"SICK-R":83.7,"STS12":78.8,"STS13":86.37,"STS14":84.04,"STS15":88.99,"STS16":87.22,"STSBenchmark":88.65} +{"Rank":6,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":84.89,"BIOSSES":82.13,"SICK-R":83.01,"STS12":78.85,"STS13":86.84,"STS14":84.04,"STS15":88.72,"STS16":86.79,"STSBenchmark":88.72} +{"Rank":7,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":84.82,"BIOSSES":84.92,"SICK-R":83.94,"STS12":79.27,"STS13":84.83,"STS14":82.94,"STS15":88.09,"STS16":86.54,"STSBenchmark":88.05} +{"Rank":8,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":84.68,"BIOSSES":85.88,"SICK-R":82.25,"STS12":78.28,"STS13":85.52,"STS14":82.49,"STS15":88.76,"STS16":87.11,"STSBenchmark":87.16} +{"Rank":9,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":84.56,"BIOSSES":84.85,"SICK-R":79.71,"STS12":77.09,"STS13":88.91,"STS14":82.08,"STS15":89.21,"STS16":84.74,"STSBenchmark":89.86} +{"Rank":10,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":83.94,"BIOSSES":80.43,"SICK-R":80.47,"STS12":78.85,"STS13":88.94,"STS14":84.86,"STS15":89.32,"STS16":84.67,"STSBenchmark":84.01} +{"Rank":11,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":83.75,"BIOSSES":81.12,"SICK-R":79.15,"STS12":76.52,"STS13":88.63,"STS14":83.32,"STS15":87.5,"STS16":86.39,"STSBenchmark":87.35} +{"Rank":12,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":83.36,"BIOSSES":78.93,"SICK-R":80.34,"STS12":79.11,"STS13":87.33,"STS14":83.17,"STS15":88.28,"STS16":84.36,"STSBenchmark":85.36} +{"Rank":13,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":83.07,"BIOSSES":83.3,"SICK-R":79.27,"STS12":78.3,"STS13":85.81,"STS14":81.38,"STS15":86.79,"STS16":84.56,"STSBenchmark":85.14} +{"Rank":14,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":83.05,"BIOSSES":82.49,"SICK-R":80.23,"STS12":80.02,"STS13":81.55,"STS14":77.72,"STS15":89.31,"STS16":85.79,"STSBenchmark":87.29} +{"Rank":15,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":82.92,"BIOSSES":73.12,"SICK-R":79.98,"STS12":79.02,"STS13":88.8,"STS14":84.33,"STS15":88.89,"STS16":85.31,"STSBenchmark":83.93} +{"Rank":16,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":82.72,"BIOSSES":81.58,"SICK-R":79.24,"STS12":78.16,"STS13":86.01,"STS14":81.25,"STS15":86.51,"STS16":84.24,"STSBenchmark":84.8} +{"Rank":17,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.61,"BIOSSES":84.68,"SICK-R":79.0,"STS12":72.84,"STS13":86.1,"STS14":81.15,"STS15":88.49,"STS16":85.08,"STSBenchmark":83.56} +{"Rank":18,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.49,"BIOSSES":88.72,"SICK-R":76.73,"STS12":73.09,"STS13":84.92,"STS14":79.81,"STS15":88.01,"STS16":84.41,"STSBenchmark":84.24} +{"Rank":19,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":82.4,"BIOSSES":75.89,"SICK-R":80.18,"STS12":78.05,"STS13":85.85,"STS14":82.19,"STS15":87.46,"STS16":84.03,"STSBenchmark":85.52} +{"Rank":20,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":82.15,"BIOSSES":76.27,"SICK-R":79.62,"STS12":77.9,"STS13":85.11,"STS14":80.81,"STS15":87.48,"STS16":83.2,"STSBenchmark":86.82} +{"Rank":21,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":82.11,"BIOSSES":80.19,"SICK-R":79.09,"STS12":77.49,"STS13":85.62,"STS14":80.5,"STS15":85.84,"STS16":83.9,"STSBenchmark":84.28} +{"Rank":22,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.05,"BIOSSES":84.87,"SICK-R":79.18,"STS12":71.98,"STS13":85.52,"STS14":80.5,"STS15":87.51,"STS16":84.48,"STSBenchmark":82.34} +{"Rank":23,"Model":"multilingual-e5-base<\/a>","Model Size 
(Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":81.62,"BIOSSES":85.05,"SICK-R":78.51,"STS12":76.7,"STS13":78.02,"STS14":76.6,"STS15":88.16,"STS16":84.28,"STSBenchmark":85.64} +{"Rank":24,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.42,"BIOSSES":86.35,"SICK-R":80.6,"STS12":69.8,"STS13":83.27,"STS14":76.09,"STS15":86.12,"STS16":85.96,"STSBenchmark":83.17} +{"Rank":25,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":81.04,"BIOSSES":77.18,"SICK-R":78.76,"STS12":77.3,"STS13":84.18,"STS14":79.37,"STS15":84.69,"STS16":83.36,"STSBenchmark":83.46} +{"Rank":26,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":80.53,"BIOSSES":80.43,"SICK-R":80.59,"STS12":72.63,"STS13":83.48,"STS14":78.0,"STS15":85.66,"STS16":80.03,"STSBenchmark":83.42} +{"Rank":27,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":80.48,"BIOSSES":82.46,"SICK-R":77.51,"STS12":76.56,"STS13":76.97,"STS14":75.52,"STS15":87.12,"STS16":83.63,"STSBenchmark":84.11} +{"Rank":28,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":80.47,"BIOSSES":83.57,"SICK-R":79.32,"STS12":73.08,"STS13":82.13,"STS14":76.73,"STS15":85.58,"STS16":80.23,"STSBenchmark":83.09} +{"Rank":29,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":80.08,"BIOSSES":74.18,"SICK-R":79.61,"STS12":76.02,"STS13":80.7,"STS14":78.85,"STS15":85.84,"STS16":81.05,"STSBenchmark":84.42} +{"Rank":30,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":79.97,"BIOSSES":68.38,"SICK-R":80.77,"STS12":75.3,"STS13":84.67,"STS14":80.19,"STS15":85.4,"STS16":80.82,"STSBenchmark":84.25} +{"Rank":31,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":79.59,"BIOSSES":85.01,"SICK-R":81.47,"STS12":65.84,"STS13":78.37,"STS14":77.52,"STS15":85.43,"STS16":79.94,"STSBenchmark":83.1} +{"Rank":32,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":79.27,"BIOSSES":81.64,"SICK-R":77.58,"STS12":72.37,"STS13":80.6,"STS14":75.59,"STS15":85.39,"STS16":78.99,"STSBenchmark":82.03} +{"Rank":33,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.27,"BIOSSES":78.04,"SICK-R":77.48,"STS12":72.3,"STS13":81.49,"STS14":74.74,"STS15":84.28,"STS16":82.06,"STSBenchmark":83.78} +{"Rank":34,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":79.21,"BIOSSES":84.86,"SICK-R":73.39,"STS12":70.33,"STS13":82.19,"STS14":77.16,"STS15":86.31,"STS16":81.85,"STSBenchmark":77.6} +{"Rank":35,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":79.19,"BIOSSES":83.29,"SICK-R":75.55,"STS12":67.65,"STS13":83.9,"STS14":76.97,"STS15":83.8,"STS16":81.91,"STSBenchmark":80.42} +{"Rank":36,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":79.11,"BIOSSES":81.91,"SICK-R":74.29,"STS12":70.12,"STS13":82.72,"STS14":78.24,"STS15":86.26,"STS16":81.61,"STSBenchmark":77.73} +{"Rank":37,"Model":"gtr-t5-xl<\/a>","Model 
Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":78.31,"BIOSSES":78.94,"SICK-R":73.63,"STS12":69.11,"STS13":81.82,"STS14":77.07,"STS15":86.01,"STS16":82.23,"STSBenchmark":77.65} +{"Rank":38,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":77.35,"BIOSSES":79.0,"SICK-R":71.45,"STS12":68.59,"STS13":79.09,"STS14":74.64,"STS15":84.85,"STS16":81.57,"STSBenchmark":79.58} +{"Rank":39,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":77.03,"BIOSSES":83.79,"SICK-R":68.78,"STS12":64.81,"STS13":80.1,"STS14":74.96,"STS15":83.7,"STS16":80.55,"STSBenchmark":79.54} +{"Rank":40,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.77,"BIOSSES":83.32,"SICK-R":70.2,"STS12":64.34,"STS13":80.03,"STS14":74.51,"STS15":83.3,"STS16":79.67,"STSBenchmark":78.81} +{"Rank":41,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":76.73,"BIOSSES":82.41,"SICK-R":71.77,"STS12":65.39,"STS13":79.26,"STS14":72.98,"STS15":82.72,"STS16":81.02,"STSBenchmark":78.32} +{"Rank":42,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":76.41,"BIOSSES":77.32,"SICK-R":72.0,"STS12":68.19,"STS13":80.4,"STS14":74.02,"STS15":82.57,"STS16":79.78,"STSBenchmark":76.97} +{"Rank":43,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":76.16,"BIOSSES":84.67,"SICK-R":72.16,"STS12":61.6,"STS13":79.71,"STS14":72.11,"STS15":82.18,"STS16":79.41,"STSBenchmark":77.44} +{"Rank":44,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":75.86,"BIOSSES":78.34,"SICK-R":75.25,"STS12":72.96,"STS13":70.58,"STS14":70.29,"STS15":81.94,"STS16":76.8,"STSBenchmark":80.75} +{"Rank":45,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":75.01,"BIOSSES":72.31,"SICK-R":72.24,"STS12":66.05,"STS13":81.49,"STS14":73.61,"STS15":79.72,"STS16":78.12,"STSBenchmark":76.52} +{"Rank":46,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":71.34,"BIOSSES":75.12,"SICK-R":69.34,"STS12":60.09,"STS13":72.52,"STS14":66.7,"STS15":77.69,"STS16":75.94,"STSBenchmark":73.36} +{"Rank":47,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":70.95,"BIOSSES":78.7,"SICK-R":69.99,"STS12":65.08,"STS13":67.98,"STS14":64.03,"STS15":76.59,"STS16":72.98,"STSBenchmark":72.25} +{"Rank":48,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":64.53,"BIOSSES":62.01,"SICK-R":62.86,"STS12":62.6,"STS13":59.62,"STS14":57.03,"STS15":71.57,"STS16":70.75,"STSBenchmark":69.77} +{"Rank":49,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":61.73,"BIOSSES":50.25,"SICK-R":55.49,"STS12":53.51,"STS13":70.8,"STS14":63.56,"STS15":74.08,"STS16":64.6,"STSBenchmark":61.55} +{"Rank":50,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":60.68,"BIOSSES":64.95,"SICK-R":56.39,"STS12":62.49,"STS13":58.7,"STS14":54.87,"STS15":62.54,"STS16":64.27,"STSBenchmark":61.26} 
+{"Rank":51,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":60.52,"BIOSSES":44.93,"SICK-R":55.43,"STS12":54.64,"STS13":69.16,"STS14":60.81,"STS15":72.31,"STS16":65.34,"STSBenchmark":61.54} +{"Rank":52,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":52.89,"BIOSSES":54.7,"SICK-R":58.65,"STS12":30.87,"STS13":59.89,"STS14":47.73,"STS15":60.29,"STS16":63.73,"STSBenchmark":47.29} +{"Rank":53,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":84.17,"SICK-R":73.05,"STS12":66.59,"STS13":83.24,"STS14":73.71,"STS15":82.4,"STS16":null,"STSBenchmark":74.85} +{"Rank":54,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":55,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":56,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":57,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":58,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":59,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":60,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":61,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":62,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":63,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":64,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":65,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, 
fp32)":0.67,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":66,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":67,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":68,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":69,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":70,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":71,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":72,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":73,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":74,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":75,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":76,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":77,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":78,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":79,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, 
fp32)":1.34,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":80,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":81,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":82,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":83,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":84,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":85,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":86,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":87,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":88,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":89,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":90,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":91,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":92,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":93,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":94,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":95,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":96,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":97,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":98,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":99,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":null,"SICK-R":null,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":null} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":78.12,"SICK-R":77.02,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":84.32} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":77.46,"SICK-R":77.26,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":83.02} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"BIOSSES":68.95,"SICK-R":78.72,"STS12":null,"STS13":null,"STS14":null,"STS15":null,"STS16":null,"STSBenchmark":84.08} diff --git a/boards_data/en/data_tasks/Summarization/default.jsonl b/boards_data/en/data_tasks/Summarization/default.jsonl index 4c87424c6776ef8ed0d72cf0d8b5492b94d15e52..13d1858a50fb154ebee34f32626bf35ff5f25d3f 100644 --- a/boards_data/en/data_tasks/Summarization/default.jsonl +++ b/boards_data/en/data_tasks/Summarization/default.jsonl @@ -1,184 +1,111 @@ -{"level_0":0,"index":123,"Rank":1,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":33.6} -{"level_0":1,"index":194,"Rank":2,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.71} -{"level_0":2,"index":133,"Rank":3,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.71} -{"level_0":3,"index":1,"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.63} -{"level_0":4,"index":151,"Rank":5,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.52} -{"level_0":5,"index":137,"Rank":6,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.52} -{"level_0":6,"index":98,"Rank":7,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.47} -{"level_0":7,"index":0,"Rank":8,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.36} -{"level_0":8,"index":149,"Rank":9,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"SummEval":32.32} -{"level_0":9,"index":53,"Rank":10,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.12} -{"level_0":10,"index":168,"Rank":11,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.06} -{"level_0":11,"index":165,"Rank":12,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.03} -{"level_0":12,"index":111,"Rank":13,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","SummEval":32.03} -{"level_0":13,"index":108,"Rank":14,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":32.03} -{"level_0":14,"index":197,"Rank":15,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.93} -{"level_0":15,"index":261,"Rank":16,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.93} -{"level_0":16,"index":148,"Rank":17,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":31.84} -{"level_0":17,"index":103,"Rank":18,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.75} -{"level_0":18,"index":253,"Rank":19,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.66} -{"level_0":19,"index":139,"Rank":20,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.66} -{"level_0":20,"index":150,"Rank":21,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.61} -{"level_0":21,"index":114,"Rank":22,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.61} -{"level_0":22,"index":170,"Rank":23,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.61} -{"level_0":23,"index":22,"Rank":24,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"SummEval":31.61} -{"level_0":24,"index":135,"Rank":25,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.6} -{"level_0":25,"index":175,"Rank":26,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"SummEval":31.6} -{"level_0":26,"index":242,"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":31.57} -{"level_0":27,"index":174,"Rank":28,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.55} -{"level_0":28,"index":193,"Rank":29,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.55} -{"level_0":29,"index":138,"Rank":30,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.49} -{"level_0":30,"index":15,"Rank":31,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"SummEval":31.46} -{"level_0":31,"index":83,"Rank":32,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.46} -{"level_0":32,"index":63,"Rank":33,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":31.45} -{"level_0":33,"index":156,"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":31.4} -{"level_0":34,"index":243,"Rank":35,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"SummEval":31.39} -{"level_0":35,"index":157,"Rank":36,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":31.39} -{"level_0":36,"index":101,"Rank":37,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.38} -{"level_0":37,"index":61,"Rank":38,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":31.38} -{"level_0":38,"index":205,"Rank":39,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.35} -{"level_0":39,"index":126,"Rank":40,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.35} -{"level_0":40,"index":17,"Rank":41,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"SummEval":31.35} -{"level_0":41,"index":33,"Rank":42,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.29} -{"level_0":42,"index":207,"Rank":43,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.28} -{"level_0":43,"index":69,"Rank":44,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.27} -{"level_0":44,"index":173,"Rank":45,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.25} -{"level_0":45,"index":44,"Rank":46,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.23} -{"level_0":46,"index":67,"Rank":47,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":31.23} -{"level_0":47,"index":116,"Rank":48,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.22} -{"level_0":48,"index":215,"Rank":49,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"SummEval":31.2} -{"level_0":49,"index":252,"Rank":50,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"level_0":50,"index":217,"Rank":51,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.17} -{"level_0":51,"index":16,"Rank":52,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"level_0":52,"index":204,"Rank":53,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"level_0":53,"index":18,"Rank":54,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.17} -{"level_0":54,"index":96,"Rank":55,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":31.16} -{"level_0":55,"index":158,"Rank":56,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.16} -{"level_0":56,"index":105,"Rank":57,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.16} 
-{"level_0":57,"index":218,"Rank":58,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.15} -{"level_0":58,"index":118,"Rank":59,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.14} -{"level_0":59,"index":284,"Rank":60,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.12} -{"level_0":60,"index":166,"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.1} -{"level_0":61,"index":182,"Rank":62,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"level_0":62,"index":180,"Rank":63,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"level_0":63,"index":120,"Rank":64,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"level_0":64,"index":181,"Rank":65,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"level_0":65,"index":20,"Rank":66,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"SummEval":31.07} -{"level_0":66,"index":179,"Rank":67,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.07} -{"level_0":67,"index":227,"Rank":68,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"SummEval":31.05} -{"level_0":68,"index":134,"Rank":69,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":31.05} -{"level_0":69,"index":285,"Rank":70,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.03} -{"level_0":70,"index":82,"Rank":71,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.03} -{"level_0":71,"index":152,"Rank":72,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.01} -{"level_0":72,"index":9,"Rank":73,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"SummEval":31.01} -{"level_0":73,"index":183,"Rank":74,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.01} -{"level_0":74,"index":36,"Rank":75,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.99} -{"level_0":75,"index":58,"Rank":76,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":30.98} -{"level_0":76,"index":68,"Rank":77,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.97} -{"level_0":77,"index":8,"Rank":78,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.97} -{"level_0":78,"index":169,"Rank":79,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","SummEval":30.97} -{"level_0":79,"index":154,"Rank":80,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":30.97} -{"level_0":80,"index":117,"Rank":81,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.96} -{"level_0":81,"index":62,"Rank":82,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":30.94} -{"level_0":82,"index":19,"Rank":83,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.91} -{"level_0":83,"index":115,"Rank":84,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.87} -{"level_0":84,"index":51,"Rank":85,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.87} -{"level_0":85,"index":121,"Rank":86,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} -{"level_0":86,"index":99,"Rank":87,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} -{"level_0":87,"index":6,"Rank":88,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} -{"level_0":88,"index":219,"Rank":89,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.83} -{"level_0":89,"index":186,"Rank":90,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.82} -{"level_0":90,"index":229,"Rank":91,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":30.81} -{"level_0":91,"index":281,"Rank":92,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.8} -{"level_0":92,"index":259,"Rank":93,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.79} -{"level_0":93,"index":21,"Rank":94,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.77} -{"level_0":94,"index":178,"Rank":95,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.73} -{"level_0":95,"index":208,"Rank":96,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.72} -{"level_0":96,"index":72,"Rank":97,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.71} -{"level_0":97,"index":171,"Rank":98,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.71} -{"level_0":98,"index":95,"Rank":99,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.71} -{"level_0":99,"index":241,"Rank":100,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":30.67} -{"level_0":100,"index":238,"Rank":101,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.64} 
-{"level_0":101,"index":97,"Rank":102,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.62} -{"level_0":102,"index":167,"Rank":103,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.62} -{"level_0":103,"index":125,"Rank":104,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.6} -{"level_0":104,"index":119,"Rank":105,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.6} -{"level_0":105,"index":262,"Rank":106,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.57} -{"level_0":106,"index":177,"Rank":107,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.56} -{"level_0":107,"index":141,"Rank":108,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.56} -{"level_0":108,"index":136,"Rank":109,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.56} -{"level_0":109,"index":77,"Rank":110,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.52} -{"level_0":110,"index":233,"Rank":111,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"SummEval":30.49} -{"level_0":111,"index":211,"Rank":112,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.47} -{"level_0":112,"index":79,"Rank":113,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.43} -{"level_0":113,"index":254,"Rank":114,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.42} -{"level_0":114,"index":35,"Rank":115,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.41} -{"level_0":115,"index":213,"Rank":116,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.4} -{"level_0":116,"index":161,"Rank":117,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.39} -{"level_0":117,"index":84,"Rank":118,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.38} -{"level_0":118,"index":214,"Rank":119,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.36} -{"level_0":119,"index":100,"Rank":120,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.31} -{"level_0":120,"index":70,"Rank":121,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.3} -{"level_0":121,"index":153,"Rank":122,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.28} -{"level_0":122,"index":140,"Rank":123,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.28} 
-{"level_0":123,"index":81,"Rank":124,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.26} -{"level_0":124,"index":42,"Rank":125,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"SummEval":30.26} -{"level_0":125,"index":184,"Rank":126,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.25} -{"level_0":126,"index":237,"Rank":127,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":30.21} -{"level_0":127,"index":93,"Rank":128,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.19} -{"level_0":128,"index":65,"Rank":129,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":30.19} -{"level_0":129,"index":155,"Rank":130,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":30.19} -{"level_0":130,"index":34,"Rank":131,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.18} -{"level_0":131,"index":24,"Rank":132,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"SummEval":30.12} -{"level_0":132,"index":159,"Rank":133,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":30.11} -{"level_0":133,"index":248,"Rank":134,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.1} -{"level_0":134,"index":104,"Rank":135,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.1} -{"level_0":135,"index":206,"Rank":136,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"SummEval":30.08} -{"level_0":136,"index":27,"Rank":137,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"level_0":137,"index":28,"Rank":138,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"level_0":138,"index":26,"Rank":139,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"level_0":139,"index":246,"Rank":140,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.08} -{"level_0":140,"index":129,"Rank":141,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"SummEval":30.08} -{"level_0":141,"index":29,"Rank":142,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.08} -{"level_0":142,"index":210,"Rank":143,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.05} -{"level_0":143,"index":66,"Rank":144,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":30.01} -{"level_0":144,"index":162,"Rank":145,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":29.98} 
-{"level_0":145,"index":64,"Rank":146,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":29.96} -{"level_0":146,"index":282,"Rank":147,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92} -{"level_0":147,"index":283,"Rank":148,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92} -{"level_0":148,"index":71,"Rank":149,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.91} -{"level_0":149,"index":245,"Rank":150,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":29.91} -{"level_0":150,"index":106,"Rank":151,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.87} -{"level_0":151,"index":176,"Rank":152,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.87} -{"level_0":152,"index":147,"Rank":153,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.85} -{"level_0":153,"index":172,"Rank":154,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.85} -{"level_0":154,"index":113,"Rank":155,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.83} -{"level_0":155,"index":43,"Rank":156,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.82} -{"level_0":156,"index":122,"Rank":157,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.82} -{"level_0":157,"index":267,"Rank":158,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.78} -{"level_0":158,"index":260,"Rank":159,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.71} -{"level_0":159,"index":80,"Rank":160,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.71} -{"level_0":160,"index":160,"Rank":161,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEval":29.69} -{"level_0":161,"index":235,"Rank":162,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.67} -{"level_0":162,"index":244,"Rank":163,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.64} -{"level_0":163,"index":209,"Rank":164,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":29.59} -{"level_0":164,"index":256,"Rank":165,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.51} -{"level_0":165,"index":236,"Rank":166,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.5} -{"level_0":166,"index":239,"Rank":167,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.5} 
-{"level_0":167,"index":107,"Rank":168,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.47} -{"level_0":168,"index":257,"Rank":169,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.42} -{"level_0":169,"index":78,"Rank":170,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.33} -{"level_0":170,"index":185,"Rank":171,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.02} -{"level_0":171,"index":127,"Rank":172,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.02} -{"level_0":172,"index":258,"Rank":173,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.01} -{"level_0":173,"index":232,"Rank":174,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"SummEval":28.87} -{"level_0":174,"index":60,"Rank":175,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":28.49} -{"level_0":175,"index":212,"Rank":176,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":28.41} -{"level_0":176,"index":228,"Rank":177,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":27.9} -{"level_0":177,"index":231,"Rank":178,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.66} -{"level_0":178,"index":112,"Rank":179,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":27.54} -{"level_0":179,"index":230,"Rank":180,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.49} -{"level_0":180,"index":277,"Rank":181,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":26.94} -{"level_0":181,"index":11,"Rank":182,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"SummEval":26.8} -{"level_0":182,"index":23,"Rank":183,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":0.31} -{"level_0":183,"index":263,"Rank":184,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":0.29} +{"Rank":1,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.63} +{"Rank":2,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.36} +{"Rank":3,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":31.57} +{"Rank":4,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"SummEval":31.46} +{"Rank":5,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":31.45} +{"Rank":6,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"SummEval":31.39} +{"Rank":7,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":31.38} +{"Rank":8,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":31.23} +{"Rank":9,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.17} +{"Rank":10,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.15} +{"Rank":11,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.12} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"SummEval":31.05} +{"Rank":13,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.03} +{"Rank":14,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"SummEval":31.01} +{"Rank":15,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.97} +{"Rank":16,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":30.94} +{"Rank":17,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84} +{"Rank":18,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":30.81} +{"Rank":19,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.8} +{"Rank":20,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":30.67} +{"Rank":21,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.64} +{"Rank":22,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"SummEval":30.49} +{"Rank":23,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.47} +{"Rank":24,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.36} +{"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"SummEval":30.26} +{"Rank":26,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":30.23} +{"Rank":27,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":30.21} +{"Rank":28,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":30.19} +{"Rank":29,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.08} +{"Rank":30,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.05} +{"Rank":31,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":30.04} +{"Rank":32,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, 
fp32)":4.77,"SummEval":30.01} +{"Rank":33,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":29.96} +{"Rank":34,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92} +{"Rank":35,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92} +{"Rank":36,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":29.91} +{"Rank":37,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.82} +{"Rank":38,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.67} +{"Rank":39,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEval":29.65} +{"Rank":40,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.64} +{"Rank":41,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":29.59} +{"Rank":42,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.5} +{"Rank":43,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.5} +{"Rank":44,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"SummEval":28.87} +{"Rank":45,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":28.49} +{"Rank":46,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":28.41} +{"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":27.9} +{"Rank":48,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.66} +{"Rank":49,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.49} +{"Rank":50,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":26.94} +{"Rank":51,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"SummEval":26.8} +{"Rank":52,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":53,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":54,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":55,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":56,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":57,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":58,"Model":"bm25s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":59,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, 
fp32)":1.63,"SummEval":null} +{"Rank":60,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"SummEval":null} +{"Rank":61,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":62,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":63,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"SummEval":null} +{"Rank":64,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"SummEval":null} +{"Rank":65,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"SummEval":null} +{"Rank":66,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":null} +{"Rank":67,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":null} +{"Rank":68,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":null} +{"Rank":69,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":null} +{"Rank":70,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"SummEval":null} +{"Rank":71,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"SummEval":null} +{"Rank":72,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"SummEval":null} +{"Rank":73,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"SummEval":null} +{"Rank":74,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"SummEval":null} +{"Rank":75,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":null} +{"Rank":76,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"SummEval":null} +{"Rank":77,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"SummEval":null} +{"Rank":78,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"SummEval":null} +{"Rank":79,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"SummEval":null} +{"Rank":80,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":null} +{"Rank":81,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":null} +{"Rank":82,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"SummEval":null} +{"Rank":83,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"SummEval":null} +{"Rank":84,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":null} +{"Rank":85,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":null} 
+{"Rank":86,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"SummEval":null} +{"Rank":87,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":null} +{"Rank":88,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":null} +{"Rank":89,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"SummEval":null} +{"Rank":90,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"SummEval":null} +{"Rank":91,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":92,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":93,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"SummEval":null} +{"Rank":94,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"SummEval":null} +{"Rank":95,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"SummEval":null} +{"Rank":96,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":null} +{"Rank":97,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"SummEval":null} +{"Rank":98,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"SummEval":null} +{"Rank":99,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":102,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"SummEval":null} +{"Rank":103,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEval":null} +{"Rank":104,"Model":"text-search-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":105,"Model":"text-search-ada-doc-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":106,"Model":"text-search-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":107,"Model":"text-search-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":108,"Model":"text-search-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":109,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":110,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} +{"Rank":111,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":null} 
diff --git a/boards_data/fr/data_overall/default.jsonl b/boards_data/fr/data_overall/default.jsonl index aab26e04a1aac14cbb9014895d80451884dacf40..a820f841614c96dcf6b6bbe5d17ddd77479675db 100644 --- a/boards_data/fr/data_overall/default.jsonl +++ b/boards_data/fr/data_overall/default.jsonl @@ -1,67 +1,57 @@ -{"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Embedding Dimensions":3584,"Max Tokens":32768,"Average (26 datasets)":68.25,"Classification Average (6 datasets)":81.76,"Clustering Average (7 datasets)":55.56,"PairClassification Average (2 datasets)":90.43,"Reranking Average (2 datasets)":78.7,"Retrieval Average (5 datasets)":55.65,"STS Average (3 datasets)":82.31,"Summarization Average (1 datasets)":31.45} -{"index":8,"Rank":2,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":66.6,"Classification Average (6 datasets)":78.02,"Clustering Average (7 datasets)":55.01,"PairClassification Average (2 datasets)":86.88,"Reranking Average (2 datasets)":83.76,"Retrieval Average (5 datasets)":52.56,"STS Average (3 datasets)":81.26,"Summarization Average (1 datasets)":30.5} -{"index":22,"Rank":3,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":62.47,"Classification Average (6 datasets)":69.16,"Clustering Average (7 datasets)":51.26,"PairClassification Average (2 datasets)":79.64,"Reranking Average (2 datasets)":79.79,"Retrieval Average (5 datasets)":50.09,"STS Average (3 datasets)":83.62,"Summarization Average (1 datasets)":30.3} -{"index":4,"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average (26 datasets)":61.65,"Classification Average (6 datasets)":68.56,"Clustering Average (7 datasets)":46.57,"PairClassification Average (2 datasets)":78.66,"Reranking Average (2 datasets)":82.59,"Retrieval Average (5 datasets)":54.56,"STS Average (3 datasets)":80.13,"Summarization Average (1 datasets)":29.96} -{"index":3,"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (26 datasets)":60.58,"Classification Average (6 datasets)":68.45,"Clustering Average (7 datasets)":44.23,"PairClassification Average (2 datasets)":77.3,"Reranking Average (2 datasets)":82.06,"Retrieval Average (5 datasets)":52.98,"STS Average (3 datasets)":80.29,"Summarization Average (1 datasets)":30.34} -{"index":0,"Rank":6,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":"","Average (26 datasets)":59.41,"Classification Average (6 datasets)":68.61,"Clustering Average (7 datasets)":44.74,"PairClassification Average (2 datasets)":77.32,"Reranking Average (2 datasets)":80.46,"Retrieval Average (5 datasets)":46.81,"STS Average (3 datasets)":79.56,"Summarization Average (1 datasets)":31.47} -{"index":21,"Rank":7,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":59.09,"Classification Average (6 datasets)":66.74,"Clustering Average (7 datasets)":47.45,"PairClassification Average (2 
datasets)":77.01,"Reranking Average (2 datasets)":77.14,"Retrieval Average (5 datasets)":44.99,"STS Average (3 datasets)":80.12,"Summarization Average (1 datasets)":30.16} -{"index":2,"Rank":8,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":16000,"Average (26 datasets)":58.53,"Classification Average (6 datasets)":67.44,"Clustering Average (7 datasets)":45.46,"PairClassification Average (2 datasets)":76.85,"Reranking Average (2 datasets)":78.78,"Retrieval Average (5 datasets)":45.33,"STS Average (3 datasets)":77.52,"Summarization Average (1 datasets)":28.34} -{"index":29,"Rank":9,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":58.03,"Classification Average (6 datasets)":70.02,"Clustering Average (7 datasets)":39.65,"PairClassification Average (2 datasets)":77.1,"Reranking Average (2 datasets)":76.58,"Retrieval Average (5 datasets)":46.78,"STS Average (3 datasets)":80.01,"Summarization Average (1 datasets)":29.69} -{"index":72,"Rank":10,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":57.81,"Classification Average (6 datasets)":67.36,"Clustering Average (7 datasets)":43.25,"PairClassification Average (2 datasets)":78.96,"Reranking Average (2 datasets)":76.76,"Retrieval Average (5 datasets)":43.84,"STS Average (3 datasets)":78.37,"Summarization Average (1 datasets)":30.39} -{"index":24,"Rank":11,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":57.77,"Classification Average (6 datasets)":65.18,"Clustering Average (7 datasets)":46.25,"PairClassification Average (2 datasets)":75.61,"Reranking Average (2 datasets)":76.0,"Retrieval Average (5 datasets)":42.92,"STS Average (3 datasets)":79.32,"Summarization Average (1 datasets)":31.55} -{"index":23,"Rank":12,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":57.73,"Classification Average (6 datasets)":66.54,"Clustering Average (7 datasets)":38.61,"PairClassification Average (2 datasets)":78.3,"Reranking Average (2 datasets)":80.35,"Retrieval Average (5 datasets)":46.68,"STS Average (3 datasets)":83.08,"Summarization Average (1 datasets)":31.62} -{"index":54,"Rank":13,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":57.19,"Classification Average (6 datasets)":65.96,"Clustering Average (7 datasets)":41.82,"PairClassification Average (2 datasets)":80.15,"Reranking Average (2 datasets)":76.43,"Retrieval Average (5 datasets)":44.41,"STS Average (3 datasets)":77.72,"Summarization Average (1 datasets)":30.22} -{"index":53,"Rank":14,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":57.13,"Classification Average (6 datasets)":67.11,"Clustering Average (7 datasets)":40.4,"PairClassification Average (2 datasets)":79.57,"Reranking Average (2 datasets)":75.48,"Retrieval Average (5 datasets)":44.9,"STS Average (3 
datasets)":77.64,"Summarization Average (1 datasets)":32.46} -{"index":36,"Rank":15,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Embedding Dimensions":1024,"Max Tokens":512,"Average (26 datasets)":56.67,"Classification Average (6 datasets)":65.82,"Clustering Average (7 datasets)":43.17,"PairClassification Average (2 datasets)":77.11,"Reranking Average (2 datasets)":72.89,"Retrieval Average (5 datasets)":40.04,"STS Average (3 datasets)":81.74,"Summarization Average (1 datasets)":30.88} -{"index":52,"Rank":16,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":56.47,"Classification Average (6 datasets)":67.4,"Clustering Average (7 datasets)":39.81,"PairClassification Average (2 datasets)":80.23,"Reranking Average (2 datasets)":75.2,"Retrieval Average (5 datasets)":43.85,"STS Average (3 datasets)":74.91,"Summarization Average (1 datasets)":30.27} -{"index":80,"Rank":17,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":56.39,"Classification Average (6 datasets)":65.92,"Clustering Average (7 datasets)":39.86,"PairClassification Average (2 datasets)":76.97,"Reranking Average (2 datasets)":75.75,"Retrieval Average (5 datasets)":42.96,"STS Average (3 datasets)":80.59,"Summarization Average (1 datasets)":29.65} -{"index":50,"Rank":18,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":56.23,"Classification Average (6 datasets)":67.57,"Clustering Average (7 datasets)":37.9,"PairClassification Average (2 datasets)":77.1,"Reranking Average (2 datasets)":75.3,"Retrieval Average (5 datasets)":43.58,"STS Average (3 datasets)":79.5,"Summarization Average (1 datasets)":30.13} -{"index":44,"Rank":19,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":56.19,"Classification Average (6 datasets)":66.8,"Clustering Average (7 datasets)":42.66,"PairClassification Average (2 datasets)":74.82,"Reranking Average (2 datasets)":71.76,"Retrieval Average (5 datasets)":41.19,"STS Average (3 datasets)":77.22,"Summarization Average (1 datasets)":30.76} -{"index":45,"Rank":20,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (26 datasets)":56.07,"Classification Average (6 datasets)":68.39,"Clustering Average (7 datasets)":38.7,"PairClassification Average (2 datasets)":76.19,"Reranking Average (2 datasets)":72.14,"Retrieval Average (5 datasets)":42.17,"STS Average (3 datasets)":79.37,"Summarization Average (1 datasets)":30.92} -{"index":15,"Rank":21,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (26 datasets)":56.02,"Classification Average (6 datasets)":67.08,"Clustering Average (7 datasets)":40.7,"PairClassification Average (2 datasets)":77.67,"Reranking Average (2 datasets)":68.36,"Retrieval Average (5 datasets)":40.42,"STS Average (3 datasets)":81.28,"Summarization Average (1 datasets)":31.26} -{"index":71,"Rank":22,"Model":"sentence-t5-xl<\/a>","Model Size 
(Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":55.58,"Classification Average (6 datasets)":65.09,"Clustering Average (7 datasets)":41.6,"PairClassification Average (2 datasets)":77.5,"Reranking Average (2 datasets)":73.18,"Retrieval Average (5 datasets)":39.69,"STS Average (3 datasets)":77.33,"Summarization Average (1 datasets)":31.59} -{"index":30,"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":2048,"Average (26 datasets)":54.25,"Classification Average (6 datasets)":64.57,"Clustering Average (7 datasets)":41.7,"PairClassification Average (2 datasets)":77.28,"Reranking Average (2 datasets)":67.95,"Retrieval Average (5 datasets)":36.81,"STS Average (3 datasets)":75.87,"Summarization Average (1 datasets)":29.04} -{"index":76,"Rank":24,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (26 datasets)":53.77,"Classification Average (6 datasets)":64.41,"Clustering Average (7 datasets)":42.0,"PairClassification Average (2 datasets)":71.92,"Reranking Average (2 datasets)":65.04,"Retrieval Average (5 datasets)":38.16,"STS Average (3 datasets)":74.92,"Summarization Average (1 datasets)":28.21} -{"index":1,"Rank":25,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":1024,"Average (26 datasets)":53.75,"Classification Average (6 datasets)":59.84,"Clustering Average (7 datasets)":41.88,"PairClassification Average (2 datasets)":74.36,"Reranking Average (2 datasets)":73.1,"Retrieval Average (5 datasets)":40.81,"STS Average (3 datasets)":71.82,"Summarization Average (1 datasets)":30.88} -{"index":68,"Rank":26,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":53.55,"Classification Average (6 datasets)":64.64,"Clustering Average (7 datasets)":39.11,"PairClassification Average (2 datasets)":75.8,"Reranking Average (2 datasets)":68.78,"Retrieval Average (5 datasets)":35.49,"STS Average (3 datasets)":78.18,"Summarization Average (1 datasets)":29.47} -{"index":47,"Rank":27,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":53.4,"Classification Average (6 datasets)":63.13,"Clustering Average (7 datasets)":38.93,"PairClassification Average (2 datasets)":74.1,"Reranking Average (2 datasets)":71.44,"Retrieval Average (5 datasets)":36.55,"STS Average (3 datasets)":77.17,"Summarization Average (1 datasets)":31.85} -{"index":70,"Rank":28,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":52.94,"Classification Average (6 datasets)":61.64,"Clustering Average (7 datasets)":40.6,"PairClassification Average (2 datasets)":75.39,"Reranking Average (2 datasets)":68.88,"Retrieval Average (5 datasets)":35.66,"STS Average (3 datasets)":75.14,"Summarization Average (1 datasets)":30.23} -{"index":14,"Rank":29,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max 
Tokens":512,"Average (26 datasets)":52.87,"Classification Average (6 datasets)":61.3,"Clustering Average (7 datasets)":38.91,"PairClassification Average (2 datasets)":74.12,"Reranking Average (2 datasets)":69.82,"Retrieval Average (5 datasets)":36.1,"STS Average (3 datasets)":78.26,"Summarization Average (1 datasets)":31.4} -{"index":77,"Rank":30,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (26 datasets)":52.82,"Classification Average (6 datasets)":66.37,"Clustering Average (7 datasets)":36.35,"PairClassification Average (2 datasets)":73.5,"Reranking Average (2 datasets)":66.26,"Retrieval Average (5 datasets)":37.78,"STS Average (3 datasets)":74.55,"Summarization Average (1 datasets)":28.56} -{"index":63,"Rank":31,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (26 datasets)":51.31,"Classification Average (6 datasets)":62.14,"Clustering Average (7 datasets)":38.7,"PairClassification Average (2 datasets)":71.57,"Reranking Average (2 datasets)":63.28,"Retrieval Average (5 datasets)":33.22,"STS Average (3 datasets)":75.46,"Summarization Average (1 datasets)":28.12} -{"index":57,"Rank":32,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":50.87,"Classification Average (6 datasets)":64.6,"Clustering Average (7 datasets)":38.32,"PairClassification Average (2 datasets)":74.3,"Reranking Average (2 datasets)":61.4,"Retrieval Average (5 datasets)":28.47,"STS Average (3 datasets)":74.33,"Summarization Average (1 datasets)":30.16} -{"index":67,"Rank":33,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":50.71,"Classification Average (6 datasets)":61.02,"Clustering Average (7 datasets)":38.35,"PairClassification Average (2 datasets)":74.47,"Reranking Average (2 datasets)":62.02,"Retrieval Average (5 datasets)":31.24,"STS Average (3 datasets)":75.18,"Summarization Average (1 datasets)":29.2} -{"index":69,"Rank":34,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":50.63,"Classification Average (6 datasets)":58.08,"Clustering Average (7 datasets)":40.07,"PairClassification Average (2 datasets)":72.38,"Reranking Average (2 datasets)":64.08,"Retrieval Average (5 datasets)":32.18,"STS Average (3 datasets)":74.49,"Summarization Average (1 datasets)":30.01} -{"index":35,"Rank":35,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":49.61,"Classification Average (6 datasets)":56.63,"Clustering Average (7 datasets)":35.44,"PairClassification Average (2 datasets)":74.74,"Reranking Average (2 datasets)":64.22,"Retrieval Average (5 datasets)":32.37,"STS Average (3 datasets)":77.79,"Summarization Average (1 datasets)":28.77} -{"index":43,"Rank":36,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (26 datasets)":48.33,"Classification Average (6 
datasets)":57.72,"Clustering Average (7 datasets)":41.16,"PairClassification Average (2 datasets)":76.08,"Reranking Average (2 datasets)":62.2,"Retrieval Average (5 datasets)":23.44,"STS Average (3 datasets)":65.36,"Summarization Average (1 datasets)":32.22} -{"index":75,"Rank":37,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":47.6,"Classification Average (6 datasets)":55.78,"Clustering Average (7 datasets)":33.66,"PairClassification Average (2 datasets)":78.81,"Reranking Average (2 datasets)":60.88,"Retrieval Average (5 datasets)":24.76,"STS Average (3 datasets)":78.28,"Summarization Average (1 datasets)":29.33} -{"index":7,"Rank":38,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":45.65,"Classification Average (6 datasets)":60.98,"Clustering Average (7 datasets)":41.21,"PairClassification Average (2 datasets)":80.54,"Reranking Average (2 datasets)":49.98,"Retrieval Average (5 datasets)":12.94,"STS Average (3 datasets)":59.33,"Summarization Average (1 datasets)":28.72} -{"index":66,"Rank":39,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":44.91,"Classification Average (6 datasets)":50.9,"Clustering Average (7 datasets)":32.84,"PairClassification Average (2 datasets)":72.72,"Reranking Average (2 datasets)":52.68,"Retrieval Average (5 datasets)":30.69,"STS Average (3 datasets)":66.86,"Summarization Average (1 datasets)":27.59} -{"index":48,"Rank":40,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (26 datasets)":43.46,"Classification Average (6 datasets)":54.9,"Clustering Average (7 datasets)":35.04,"PairClassification Average (2 datasets)":73.76,"Reranking Average (2 datasets)":50.86,"Retrieval Average (5 datasets)":17.91,"STS Average (3 datasets)":62.34,"Summarization Average (1 datasets)":29.48} -{"index":5,"Rank":41,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (26 datasets)":41.91,"Classification Average (6 datasets)":53.31,"Clustering Average (7 datasets)":29.92,"PairClassification Average (2 datasets)":81.65,"Reranking Average (2 datasets)":45.61,"Retrieval Average (5 datasets)":16.19,"STS Average (3 datasets)":64.46,"Summarization Average (1 datasets)":31.56} -{"index":42,"Rank":42,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":41.89,"Classification Average (6 datasets)":48.37,"Clustering Average (7 datasets)":40.43,"PairClassification Average (2 datasets)":70.32,"Reranking Average (2 datasets)":52.62,"Retrieval Average (5 datasets)":13.89,"STS Average (3 datasets)":56.57,"Summarization Average (1 datasets)":30.72} -{"index":17,"Rank":43,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":37.22,"Classification Average (6 datasets)":46.09,"Clustering Average (7 datasets)":34.09,"PairClassification Average (2 datasets)":70.08,"Reranking Average (2 
datasets)":44.73,"Retrieval Average (5 datasets)":8.49,"STS Average (3 datasets)":50.47,"Summarization Average (1 datasets)":29.13} -{"index":16,"Rank":44,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":37.18,"Classification Average (6 datasets)":46.1,"Clustering Average (7 datasets)":33.94,"PairClassification Average (2 datasets)":70.1,"Reranking Average (2 datasets)":44.73,"Retrieval Average (5 datasets)":8.5,"STS Average (3 datasets)":50.44,"Summarization Average (1 datasets)":29.06} -{"index":18,"Rank":45,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":37.12,"Classification Average (6 datasets)":46.09,"Clustering Average (7 datasets)":33.96,"PairClassification Average (2 datasets)":70.09,"Reranking Average (2 datasets)":44.75,"Retrieval Average (5 datasets)":8.51,"STS Average (3 datasets)":49.93,"Summarization Average (1 datasets)":28.84} -{"index":41,"Rank":46,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":37.1,"Classification Average (6 datasets)":46.1,"Clustering Average (7 datasets)":33.82,"PairClassification Average (2 datasets)":70.08,"Reranking Average (2 datasets)":44.74,"Retrieval Average (5 datasets)":8.52,"STS Average (3 datasets)":50.02,"Summarization Average (1 datasets)":28.81} -{"index":49,"Rank":47,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (26 datasets)":31.66,"Classification Average (6 datasets)":30.38,"Clustering Average (7 datasets)":29.03,"PairClassification Average (2 datasets)":70.9,"Reranking Average (2 datasets)":39.82,"Retrieval Average (5 datasets)":7.98,"STS Average (3 datasets)":50.89,"Summarization Average (1 datasets)":23.63} -{"index":79,"Rank":48,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (26 datasets)":30.66,"Classification Average (6 datasets)":30.71,"Clustering Average (7 datasets)":28.62,"PairClassification Average (2 datasets)":68.56,"Reranking Average (2 datasets)":39.01,"Retrieval Average (5 datasets)":4.14,"STS Average (3 datasets)":49.27,"Summarization Average (1 datasets)":28.89} -{"index":78,"Rank":49,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":30.35,"Classification Average (6 datasets)":31.21,"Clustering Average (7 datasets)":28.42,"PairClassification Average (2 datasets)":68.4,"Reranking Average (2 datasets)":34.66,"Retrieval Average (5 datasets)":3.2,"STS Average (3 datasets)":50.52,"Summarization Average (1 datasets)":29.14} -{"index":38,"Rank":50,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":30.05,"Classification Average (6 datasets)":24.91,"Clustering Average (7 datasets)":27.18,"PairClassification Average (2 datasets)":67.02,"Reranking Average (2 datasets)":45.34,"Retrieval Average (5 datasets)":5.87,"STS Average (3 datasets)":52.12,"Summarization Average (1 datasets)":31.26} 
-{"index":39,"Rank":51,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":27.57,"Classification Average (6 datasets)":23.28,"Clustering Average (7 datasets)":22.45,"PairClassification Average (2 datasets)":67.39,"Reranking Average (2 datasets)":45.86,"Retrieval Average (5 datasets)":6.73,"STS Average (3 datasets)":43.49,"Summarization Average (1 datasets)":29.43} -{"index":40,"Rank":52,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Embedding Dimensions":1024,"Max Tokens":512,"Average (26 datasets)":25.35,"Classification Average (6 datasets)":25.21,"Clustering Average (7 datasets)":25.11,"PairClassification Average (2 datasets)":64.46,"Reranking Average (2 datasets)":34.54,"Retrieval Average (5 datasets)":1.22,"STS Average (3 datasets)":32.93,"Summarization Average (1 datasets)":29.25} -{"index":10,"Rank":54,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (6 datasets)":81.62,"Clustering Average (7 datasets)":56.48,"PairClassification Average (2 datasets)":85.07,"Reranking Average (2 datasets)":85.22,"Retrieval Average (5 datasets)":"","STS Average (3 datasets)":82.59,"Summarization Average (1 datasets)":31.26} -{"index":19,"Rank":58,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} -{"index":34,"Rank":67,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (6 datasets)":81.76,"Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":90.43,"Reranking Average (2 datasets)":78.7,"Retrieval Average (5 datasets)":55.65,"STS Average (3 datasets)":82.31,"Summarization Average (1 datasets)":31.45} -{"index":51,"Rank":70,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (6 datasets)":68.48,"Clustering Average (7 datasets)":41.33,"PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":41.15,"STS Average (3 datasets)":72.28,"Summarization Average (1 datasets)":30.28} -{"index":55,"Rank":71,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (6 datasets)":78.02,"Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":86.88,"Reranking Average (2 datasets)":83.76,"Retrieval Average (5 datasets)":52.56,"STS Average (3 datasets)":81.26,"Summarization Average (1 datasets)":30.5} -{"index":56,"Rank":72,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max 
Tokens":"","Average (26 datasets)":"","Classification Average (6 datasets)":81.76,"Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":90.43,"Reranking Average (2 datasets)":78.7,"Retrieval Average (5 datasets)":55.65,"STS Average (3 datasets)":82.31,"Summarization Average (1 datasets)":31.45} -{"index":58,"Rank":73,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (6 datasets)":52.14,"Clustering Average (7 datasets)":33.75,"PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} -{"index":59,"Rank":74,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":34.73,"PairClassification Average (2 datasets)":70.96,"Reranking Average (2 datasets)":45.63,"Retrieval Average (5 datasets)":29.91,"STS Average (3 datasets)":68.14,"Summarization Average (1 datasets)":28.28} -{"index":60,"Rank":75,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} -{"index":61,"Rank":76,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} -{"index":62,"Rank":77,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} -{"index":64,"Rank":78,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} -{"index":65,"Rank":79,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} 
-{"index":81,"Rank":82,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (26 datasets)":"","Classification Average (6 datasets)":69.27,"Clustering Average (7 datasets)":49.67,"PairClassification Average (2 datasets)":77.14,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":48.23,"STS Average (3 datasets)":78.31,"Summarization Average (1 datasets)":30.5} -{"index":82,"Rank":83,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (26 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":1,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (27 datasets)":53.5,"Classification Average (6 datasets)":64.64,"Clustering Average (7 datasets)":39.11,"PairClassification Average (2 datasets)":75.8,"Reranking Average (2 datasets)":71.44,"Retrieval Average (5 datasets)":37.18,"STS Average (3 datasets)":77.53,"Summarization Average (1 datasets)":29.47} +{"Rank":2,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (27 datasets)":50.29,"Classification Average (6 datasets)":61.02,"Clustering Average (7 datasets)":38.35,"PairClassification Average (2 datasets)":74.47,"Reranking Average (2 datasets)":64.74,"Retrieval Average (5 datasets)":32.25,"STS Average (3 datasets)":75.16,"Summarization Average (1 datasets)":29.2} +{"Rank":3,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":49.25,"Classification Average (6 datasets)":64.6,"Clustering Average (7 datasets)":38.32,"PairClassification Average (2 datasets)":74.3,"Reranking Average (2 datasets)":61.44,"Retrieval Average (5 datasets)":27.73,"STS Average (3 datasets)":73.23,"Summarization Average (1 datasets)":30.16} +{"Rank":4,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":"","Average (27 datasets)":"","Classification Average (6 datasets)":68.61,"Clustering Average (7 datasets)":44.74,"PairClassification Average (2 datasets)":77.32,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":5,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":1024,"Average (27 datasets)":"","Classification Average (6 datasets)":59.84,"Clustering Average (7 datasets)":41.88,"PairClassification Average (2 datasets)":74.36,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":6,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":16000,"Average (27 datasets)":"","Classification Average (6 datasets)":67.44,"Clustering Average (7 
datasets)":45.46,"PairClassification Average (2 datasets)":76.85,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":7,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (27 datasets)":"","Classification Average (6 datasets)":68.45,"Clustering Average (7 datasets)":44.23,"PairClassification Average (2 datasets)":77.3,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":8,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average (27 datasets)":"","Classification Average (6 datasets)":68.56,"Clustering Average (7 datasets)":46.57,"PairClassification Average (2 datasets)":78.66,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":9,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (27 datasets)":"","Classification Average (6 datasets)":53.31,"Clustering Average (7 datasets)":29.92,"PairClassification Average (2 datasets)":81.65,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":10,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":61.3,"Clustering Average (7 datasets)":38.91,"PairClassification Average (2 datasets)":74.12,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":11,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":67.08,"Clustering Average (7 datasets)":40.7,"PairClassification Average (2 datasets)":77.67,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":12,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":13,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":14,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, 
fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":15,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":46.1,"Clustering Average (7 datasets)":33.94,"PairClassification Average (2 datasets)":70.1,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":16,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":46.09,"Clustering Average (7 datasets)":34.09,"PairClassification Average (2 datasets)":70.08,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":17,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":46.09,"Clustering Average (7 datasets)":33.96,"PairClassification Average (2 datasets)":70.09,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":18,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":19,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":2048,"Average (27 datasets)":"","Classification Average (6 datasets)":64.57,"Clustering Average (7 datasets)":41.7,"PairClassification Average (2 datasets)":77.28,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":20,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":21,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":56.63,"Clustering Average (7 datasets)":35.44,"PairClassification Average (2 datasets)":74.74,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 
datasets)":""} +{"Rank":22,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Embedding Dimensions":1024,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":65.82,"Clustering Average (7 datasets)":43.17,"PairClassification Average (2 datasets)":77.11,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":23,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":24,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":25,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":24.91,"Clustering Average (7 datasets)":27.18,"PairClassification Average (2 datasets)":67.02,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":26,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":23.28,"Clustering Average (7 datasets)":22.45,"PairClassification Average (2 datasets)":67.39,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":27,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Embedding Dimensions":1024,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":25.21,"Clustering Average (7 datasets)":25.11,"PairClassification Average (2 datasets)":64.46,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":28,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":46.1,"Clustering Average (7 datasets)":33.82,"PairClassification Average (2 datasets)":70.08,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":29,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":48.37,"Clustering Average (7 datasets)":40.43,"PairClassification Average (2 
datasets)":70.32,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":30,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (27 datasets)":"","Classification Average (6 datasets)":57.72,"Clustering Average (7 datasets)":41.16,"PairClassification Average (2 datasets)":76.08,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":31,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":42.66,"PairClassification Average (2 datasets)":74.82,"Reranking Average (2 datasets)":73.68,"Retrieval Average (5 datasets)":42.76,"STS Average (3 datasets)":"","Summarization Average (1 datasets)":31.86} +{"Rank":32,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":38.7,"PairClassification Average (2 datasets)":76.19,"Reranking Average (2 datasets)":74.79,"Retrieval Average (5 datasets)":44.23,"STS Average (3 datasets)":"","Summarization Average (1 datasets)":30.92} +{"Rank":33,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":38.93,"PairClassification Average (2 datasets)":74.1,"Reranking Average (2 datasets)":72.12,"Retrieval Average (5 datasets)":37.27,"STS Average (3 datasets)":"","Summarization Average (1 datasets)":31.5} +{"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (27 datasets)":"","Classification Average (6 datasets)":54.9,"Clustering Average (7 datasets)":35.04,"PairClassification Average (2 datasets)":73.76,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (27 datasets)":"","Classification Average (6 datasets)":30.38,"Clustering Average (7 datasets)":29.03,"PairClassification Average (2 datasets)":70.9,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":36,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":52.14,"Clustering Average (7 datasets)":33.75,"PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":37,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max 
Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":34.73,"PairClassification Average (2 datasets)":70.96,"Reranking Average (2 datasets)":55.3,"Retrieval Average (5 datasets)":30.36,"STS Average (3 datasets)":66.72,"Summarization Average (1 datasets)":28.28} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":62.14,"Clustering Average (7 datasets)":38.7,"PairClassification Average (2 datasets)":71.57,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":50.9,"Clustering Average (7 datasets)":32.84,"PairClassification Average (2 datasets)":72.72,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size 
(Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":58.08,"Clustering Average (7 datasets)":40.07,"PairClassification Average (2 datasets)":72.38,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":61.64,"Clustering Average (7 datasets)":40.6,"PairClassification Average (2 datasets)":75.39,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":65.09,"Clustering Average (7 datasets)":41.6,"PairClassification Average (2 datasets)":77.5,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":67.36,"Clustering Average (7 datasets)":43.25,"PairClassification Average (2 datasets)":78.96,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (27 datasets)":"","Classification Average (6 datasets)":55.78,"Clustering Average (7 datasets)":33.66,"PairClassification Average (2 datasets)":78.81,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":64.41,"Clustering Average (7 datasets)":42.0,"PairClassification Average (2 datasets)":71.92,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS 
Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (27 datasets)":"","Classification Average (6 datasets)":66.37,"Clustering Average (7 datasets)":36.35,"PairClassification Average (2 datasets)":73.5,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (27 datasets)":"","Classification Average (6 datasets)":31.21,"Clustering Average (7 datasets)":28.42,"PairClassification Average (2 datasets)":68.4,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (27 datasets)":"","Classification Average (6 datasets)":30.71,"Clustering Average (7 datasets)":28.62,"PairClassification Average (2 datasets)":68.56,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (27 datasets)":"","Classification Average (6 datasets)":69.27,"Clustering Average (7 datasets)":49.67,"PairClassification Average (2 datasets)":77.14,"Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (27 datasets)":"","Classification Average (6 datasets)":"","Clustering Average (7 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (2 datasets)":"","Retrieval Average (5 datasets)":"","STS Average (3 datasets)":"","Summarization Average (1 datasets)":""} diff --git a/boards_data/fr/data_tasks/Classification/default.jsonl b/boards_data/fr/data_tasks/Classification/default.jsonl index 2165d3061bf8292ed6df3fd448d4003f7dc063d8..b582576b4e24a1177c17872611eae742d44da400 100644 --- a/boards_data/fr/data_tasks/Classification/default.jsonl +++ b/boards_data/fr/data_tasks/Classification/default.jsonl @@ -1,77 +1,57 @@ -{"level_0":0,"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":81.76,"AmazonReviewsClassification (fr)":55.53,"MasakhaNEWSClassification (fra)":82.61,"MassiveIntentClassification (fr)":81.65,"MassiveScenarioClassification (fr)":86.64,"MTOPDomainClassification (fr)":96.69,"MTOPIntentClassification (fr)":87.47} -{"level_0":1,"index":56,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.76,"AmazonReviewsClassification (fr)":55.53,"MasakhaNEWSClassification (fra)":82.61,"MassiveIntentClassification (fr)":81.65,"MassiveScenarioClassification (fr)":86.64,"MTOPDomainClassification (fr)":96.69,"MTOPIntentClassification 
(fr)":87.47} -{"level_0":2,"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.76,"AmazonReviewsClassification (fr)":55.53,"MasakhaNEWSClassification (fra)":82.61,"MassiveIntentClassification (fr)":81.65,"MassiveScenarioClassification (fr)":86.64,"MTOPDomainClassification (fr)":96.69,"MTOPIntentClassification (fr)":87.47} -{"level_0":3,"index":10,"Rank":4,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.62,"AmazonReviewsClassification (fr)":55.19,"MasakhaNEWSClassification (fra)":82.49,"MassiveIntentClassification (fr)":79.6,"MassiveScenarioClassification (fr)":82.18,"MTOPDomainClassification (fr)":97.2,"MTOPIntentClassification (fr)":93.07} -{"level_0":4,"index":55,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.02,"AmazonReviewsClassification (fr)":53.47,"MasakhaNEWSClassification (fra)":85.19,"MassiveIntentClassification (fr)":76.65,"MassiveScenarioClassification (fr)":79.1,"MTOPDomainClassification (fr)":93.48,"MTOPIntentClassification (fr)":80.23} -{"level_0":5,"index":8,"Rank":6,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.02,"AmazonReviewsClassification (fr)":53.47,"MasakhaNEWSClassification (fra)":85.19,"MassiveIntentClassification (fr)":76.65,"MassiveScenarioClassification (fr)":79.1,"MTOPDomainClassification (fr)":93.48,"MTOPIntentClassification (fr)":80.23} -{"level_0":6,"index":29,"Rank":7,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.02,"AmazonReviewsClassification (fr)":42.08,"MasakhaNEWSClassification (fra)":81.52,"MassiveIntentClassification (fr)":67.4,"MassiveScenarioClassification (fr)":71.29,"MTOPDomainClassification (fr)":89.26,"MTOPIntentClassification (fr)":68.55} -{"level_0":7,"index":81,"Rank":8,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.27,"AmazonReviewsClassification (fr)":43.76,"MasakhaNEWSClassification (fra)":81.52,"MassiveIntentClassification (fr)":65.42,"MassiveScenarioClassification (fr)":71.11,"MTOPDomainClassification (fr)":89.38,"MTOPIntentClassification (fr)":64.45} -{"level_0":8,"index":22,"Rank":9,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.16,"AmazonReviewsClassification (fr)":43.42,"MasakhaNEWSClassification (fra)":80.57,"MassiveIntentClassification (fr)":66.81,"MassiveScenarioClassification (fr)":72.99,"MTOPDomainClassification (fr)":88.33,"MTOPIntentClassification (fr)":62.85} -{"level_0":9,"index":0,"Rank":10,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.61,"AmazonReviewsClassification (fr)":41.59,"MasakhaNEWSClassification (fra)":81.4,"MassiveIntentClassification (fr)":62.83,"MassiveScenarioClassification (fr)":69.71,"MTOPDomainClassification (fr)":90.05,"MTOPIntentClassification (fr)":66.09} -{"level_0":10,"index":4,"Rank":11,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.56,"AmazonReviewsClassification (fr)":43.36,"MasakhaNEWSClassification (fra)":74.81,"MassiveIntentClassification (fr)":68.06,"MassiveScenarioClassification (fr)":74.29,"MTOPDomainClassification 
(fr)":90.33,"MTOPIntentClassification (fr)":60.52} -{"level_0":11,"index":51,"Rank":12,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.48,"AmazonReviewsClassification (fr)":36.62,"MasakhaNEWSClassification (fra)":80.4,"MassiveIntentClassification (fr)":65.86,"MassiveScenarioClassification (fr)":71.6,"MTOPDomainClassification (fr)":88.7,"MTOPIntentClassification (fr)":67.69} -{"level_0":12,"index":3,"Rank":13,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"AmazonReviewsClassification (fr)":41.98,"MasakhaNEWSClassification (fra)":76.42,"MassiveIntentClassification (fr)":66.94,"MassiveScenarioClassification (fr)":72.78,"MTOPDomainClassification (fr)":90.12,"MTOPIntentClassification (fr)":62.44} -{"level_0":13,"index":45,"Rank":14,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":68.39,"AmazonReviewsClassification (fr)":41.91,"MasakhaNEWSClassification (fra)":79.38,"MassiveIntentClassification (fr)":69.34,"MassiveScenarioClassification (fr)":73.87,"MTOPDomainClassification (fr)":86.41,"MTOPIntentClassification (fr)":59.43} -{"level_0":14,"index":50,"Rank":15,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.57,"AmazonReviewsClassification (fr)":42.33,"MasakhaNEWSClassification (fra)":70.52,"MassiveIntentClassification (fr)":66.7,"MassiveScenarioClassification (fr)":74.58,"MTOPDomainClassification (fr)":90.39,"MTOPIntentClassification (fr)":60.88} -{"level_0":15,"index":2,"Rank":16,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.44,"AmazonReviewsClassification (fr)":42.15,"MasakhaNEWSClassification (fra)":82.13,"MassiveIntentClassification (fr)":63.08,"MassiveScenarioClassification (fr)":70.15,"MTOPDomainClassification (fr)":87.68,"MTOPIntentClassification (fr)":59.44} -{"level_0":16,"index":52,"Rank":17,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.4,"AmazonReviewsClassification (fr)":36.48,"MasakhaNEWSClassification (fra)":73.18,"MassiveIntentClassification (fr)":66.3,"MassiveScenarioClassification (fr)":71.7,"MTOPDomainClassification (fr)":88.96,"MTOPIntentClassification (fr)":67.76} -{"level_0":17,"index":72,"Rank":18,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":67.36,"AmazonReviewsClassification (fr)":46.09,"MasakhaNEWSClassification (fra)":79.1,"MassiveIntentClassification (fr)":65.91,"MassiveScenarioClassification (fr)":68.53,"MTOPDomainClassification (fr)":86.2,"MTOPIntentClassification (fr)":58.33} -{"level_0":18,"index":53,"Rank":19,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.11,"AmazonReviewsClassification (fr)":35.11,"MasakhaNEWSClassification (fra)":75.17,"MassiveIntentClassification (fr)":66.48,"MassiveScenarioClassification (fr)":71.47,"MTOPDomainClassification (fr)":88.52,"MTOPIntentClassification (fr)":65.93} -{"level_0":19,"index":15,"Rank":20,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.08,"AmazonReviewsClassification (fr)":41.89,"MasakhaNEWSClassification (fra)":83.06,"MassiveIntentClassification (fr)":62.94,"MassiveScenarioClassification 
(fr)":67.29,"MTOPDomainClassification (fr)":86.23,"MTOPIntentClassification (fr)":61.07} -{"level_0":20,"index":44,"Rank":21,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":66.8,"AmazonReviewsClassification (fr)":40.94,"MasakhaNEWSClassification (fra)":79.69,"MassiveIntentClassification (fr)":67.95,"MassiveScenarioClassification (fr)":71.89,"MTOPDomainClassification (fr)":84.79,"MTOPIntentClassification (fr)":55.51} -{"level_0":21,"index":21,"Rank":22,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.74,"AmazonReviewsClassification (fr)":40.35,"MasakhaNEWSClassification (fra)":77.44,"MassiveIntentClassification (fr)":64.99,"MassiveScenarioClassification (fr)":71.72,"MTOPDomainClassification (fr)":86.83,"MTOPIntentClassification (fr)":59.13} -{"level_0":22,"index":23,"Rank":23,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.54,"AmazonReviewsClassification (fr)":44.11,"MasakhaNEWSClassification (fra)":69.81,"MassiveIntentClassification (fr)":66.14,"MassiveScenarioClassification (fr)":72.74,"MTOPDomainClassification (fr)":87.82,"MTOPIntentClassification (fr)":58.63} -{"level_0":23,"index":77,"Rank":24,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.37,"AmazonReviewsClassification (fr)":35.09,"MasakhaNEWSClassification (fra)":72.04,"MassiveIntentClassification (fr)":65.8,"MassiveScenarioClassification (fr)":73.47,"MTOPDomainClassification (fr)":88.19,"MTOPIntentClassification (fr)":63.64} -{"level_0":24,"index":54,"Rank":25,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.96,"AmazonReviewsClassification (fr)":34.62,"MasakhaNEWSClassification (fra)":74.29,"MassiveIntentClassification (fr)":65.67,"MassiveScenarioClassification (fr)":71.61,"MTOPDomainClassification (fr)":86.97,"MTOPIntentClassification (fr)":62.59} -{"level_0":25,"index":80,"Rank":26,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.92,"AmazonReviewsClassification (fr)":43.51,"MasakhaNEWSClassification (fra)":72.61,"MassiveIntentClassification (fr)":65.15,"MassiveScenarioClassification (fr)":69.94,"MTOPDomainClassification (fr)":85.33,"MTOPIntentClassification (fr)":59.01} -{"level_0":26,"index":36,"Rank":27,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":65.82,"AmazonReviewsClassification (fr)":37.97,"MasakhaNEWSClassification (fra)":80.62,"MassiveIntentClassification (fr)":62.65,"MassiveScenarioClassification (fr)":69.29,"MTOPDomainClassification (fr)":85.74,"MTOPIntentClassification (fr)":58.62} -{"level_0":27,"index":24,"Rank":28,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.18,"AmazonReviewsClassification (fr)":36.48,"MasakhaNEWSClassification (fra)":78.44,"MassiveIntentClassification (fr)":64.57,"MassiveScenarioClassification (fr)":69.04,"MTOPDomainClassification (fr)":84.19,"MTOPIntentClassification (fr)":58.35} -{"level_0":28,"index":71,"Rank":29,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":65.09,"AmazonReviewsClassification (fr)":43.52,"MasakhaNEWSClassification 
(fra)":80.09,"MassiveIntentClassification (fr)":60.99,"MassiveScenarioClassification (fr)":66.42,"MTOPDomainClassification (fr)":85.14,"MTOPIntentClassification (fr)":54.39} -{"level_0":29,"index":68,"Rank":30,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.64,"AmazonReviewsClassification (fr)":39.0,"MasakhaNEWSClassification (fra)":78.1,"MassiveIntentClassification (fr)":61.88,"MassiveScenarioClassification (fr)":67.9,"MTOPDomainClassification (fr)":81.21,"MTOPIntentClassification (fr)":59.76} -{"level_0":30,"index":57,"Rank":31,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":64.6,"AmazonReviewsClassification (fr)":38.52,"MasakhaNEWSClassification (fra)":77.39,"MassiveIntentClassification (fr)":60.47,"MassiveScenarioClassification (fr)":65.1,"MTOPDomainClassification (fr)":84.14,"MTOPIntentClassification (fr)":62.01} -{"level_0":31,"index":30,"Rank":32,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":64.57,"AmazonReviewsClassification (fr)":34.79,"MasakhaNEWSClassification (fra)":79.29,"MassiveIntentClassification (fr)":59.41,"MassiveScenarioClassification (fr)":65.29,"MTOPDomainClassification (fr)":85.52,"MTOPIntentClassification (fr)":63.12} -{"level_0":32,"index":76,"Rank":33,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.41,"AmazonReviewsClassification (fr)":33.51,"MasakhaNEWSClassification (fra)":82.06,"MassiveIntentClassification (fr)":61.19,"MassiveScenarioClassification (fr)":70.22,"MTOPDomainClassification (fr)":85.5,"MTOPIntentClassification (fr)":53.98} -{"level_0":33,"index":47,"Rank":34,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":63.13,"AmazonReviewsClassification (fr)":39.68,"MasakhaNEWSClassification (fra)":77.65,"MassiveIntentClassification (fr)":65.47,"MassiveScenarioClassification (fr)":68.76,"MTOPDomainClassification (fr)":81.2,"MTOPIntentClassification (fr)":46.01} -{"level_0":34,"index":63,"Rank":35,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":62.14,"AmazonReviewsClassification (fr)":35.7,"MasakhaNEWSClassification (fra)":76.87,"MassiveIntentClassification (fr)":57.02,"MassiveScenarioClassification (fr)":65.2,"MTOPDomainClassification (fr)":84.61,"MTOPIntentClassification (fr)":53.41} -{"level_0":35,"index":70,"Rank":36,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":61.64,"AmazonReviewsClassification (fr)":41.48,"MasakhaNEWSClassification (fra)":80.43,"MassiveIntentClassification (fr)":57.01,"MassiveScenarioClassification (fr)":63.6,"MTOPDomainClassification (fr)":79.6,"MTOPIntentClassification (fr)":47.73} -{"level_0":36,"index":14,"Rank":37,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.3,"AmazonReviewsClassification (fr)":38.6,"MasakhaNEWSClassification (fra)":82.58,"MassiveIntentClassification (fr)":56.31,"MassiveScenarioClassification (fr)":59.5,"MTOPDomainClassification (fr)":80.79,"MTOPIntentClassification (fr)":50.01} -{"level_0":37,"index":67,"Rank":38,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":61.02,"AmazonReviewsClassification (fr)":35.3,"MasakhaNEWSClassification (fra)":76.09,"MassiveIntentClassification (fr)":57.52,"MassiveScenarioClassification (fr)":64.52,"MTOPDomainClassification (fr)":78.63,"MTOPIntentClassification (fr)":54.05} -{"level_0":38,"index":7,"Rank":39,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.98,"AmazonReviewsClassification (fr)":35.07,"MasakhaNEWSClassification (fra)":76.0,"MassiveIntentClassification (fr)":56.03,"MassiveScenarioClassification (fr)":59.3,"MTOPDomainClassification (fr)":75.7,"MTOPIntentClassification (fr)":63.76} -{"level_0":39,"index":1,"Rank":40,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.84,"AmazonReviewsClassification (fr)":37.26,"MasakhaNEWSClassification (fra)":80.19,"MassiveIntentClassification (fr)":53.7,"MassiveScenarioClassification (fr)":62.46,"MTOPDomainClassification (fr)":79.79,"MTOPIntentClassification (fr)":45.62} -{"level_0":40,"index":69,"Rank":41,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":58.08,"AmazonReviewsClassification (fr)":37.35,"MasakhaNEWSClassification (fra)":81.21,"MassiveIntentClassification (fr)":51.13,"MassiveScenarioClassification (fr)":59.92,"MTOPDomainClassification (fr)":75.03,"MTOPIntentClassification (fr)":43.85} -{"level_0":41,"index":43,"Rank":42,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":57.72,"AmazonReviewsClassification (fr)":36.71,"MasakhaNEWSClassification (fra)":80.59,"MassiveIntentClassification (fr)":46.39,"MassiveScenarioClassification (fr)":53.86,"MTOPDomainClassification (fr)":74.8,"MTOPIntentClassification (fr)":53.97} -{"level_0":42,"index":35,"Rank":43,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.63,"AmazonReviewsClassification (fr)":36.03,"MasakhaNEWSClassification (fra)":70.36,"MassiveIntentClassification (fr)":51.59,"MassiveScenarioClassification (fr)":61.28,"MTOPDomainClassification (fr)":77.1,"MTOPIntentClassification (fr)":43.44} -{"level_0":43,"index":75,"Rank":44,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.78,"AmazonReviewsClassification (fr)":34.25,"MasakhaNEWSClassification (fra)":73.84,"MassiveIntentClassification (fr)":51.93,"MassiveScenarioClassification (fr)":58.31,"MTOPDomainClassification (fr)":71.83,"MTOPIntentClassification (fr)":44.53} -{"level_0":44,"index":48,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.9,"AmazonReviewsClassification (fr)":35.12,"MasakhaNEWSClassification (fra)":80.83,"MassiveIntentClassification (fr)":43.21,"MassiveScenarioClassification (fr)":49.78,"MTOPDomainClassification (fr)":69.24,"MTOPIntentClassification (fr)":51.25} -{"level_0":45,"index":5,"Rank":46,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":53.31,"AmazonReviewsClassification (fr)":31.12,"MasakhaNEWSClassification (fra)":65.9,"MassiveIntentClassification (fr)":46.13,"MassiveScenarioClassification (fr)":54.32,"MTOPDomainClassification (fr)":72.26,"MTOPIntentClassification (fr)":50.12} -{"level_0":46,"index":58,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, 
fp32)":0.12,"Average":52.14,"AmazonReviewsClassification (fr)":27.54,"MasakhaNEWSClassification (fra)":72.2,"MassiveIntentClassification (fr)":44.82,"MassiveScenarioClassification (fr)":53.76,"MTOPDomainClassification (fr)":75.59,"MTOPIntentClassification (fr)":38.94} -{"level_0":47,"index":66,"Rank":48,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":50.9,"AmazonReviewsClassification (fr)":27.05,"MasakhaNEWSClassification (fra)":75.62,"MassiveIntentClassification (fr)":42.64,"MassiveScenarioClassification (fr)":49.92,"MTOPDomainClassification (fr)":72.97,"MTOPIntentClassification (fr)":37.18} -{"level_0":48,"index":42,"Rank":49,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":48.37,"AmazonReviewsClassification (fr)":29.02,"MasakhaNEWSClassification (fra)":75.69,"MassiveIntentClassification (fr)":38.01,"MassiveScenarioClassification (fr)":43.63,"MTOPDomainClassification (fr)":64.49,"MTOPIntentClassification (fr)":39.4} -{"level_0":49,"index":41,"Rank":50,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":46.1,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":64.0,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.61,"MTOPIntentClassification (fr)":37.84} -{"level_0":50,"index":16,"Rank":51,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.1,"AmazonReviewsClassification (fr)":29.38,"MasakhaNEWSClassification (fra)":63.93,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.5,"MTOPDomainClassification (fr)":63.65,"MTOPIntentClassification (fr)":37.87} -{"level_0":51,"index":18,"Rank":52,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":63.91,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.63,"MTOPIntentClassification (fr)":37.86} -{"level_0":52,"index":17,"Rank":53,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.35,"MasakhaNEWSClassification (fra)":63.89,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.7,"MTOPIntentClassification (fr)":37.85} -{"level_0":53,"index":78,"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":31.21,"AmazonReviewsClassification (fr)":26.75,"MasakhaNEWSClassification (fra)":60.5,"MassiveIntentClassification (fr)":13.58,"MassiveScenarioClassification (fr)":23.21,"MTOPDomainClassification (fr)":43.83,"MTOPIntentClassification (fr)":19.38} -{"level_0":54,"index":79,"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.71,"AmazonReviewsClassification (fr)":26.62,"MasakhaNEWSClassification (fra)":65.76,"MassiveIntentClassification (fr)":15.82,"MassiveScenarioClassification (fr)":23.92,"MTOPDomainClassification (fr)":36.77,"MTOPIntentClassification (fr)":15.37} -{"level_0":55,"index":49,"Rank":56,"Model":"udever-bloom-560m<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.38,"AmazonReviewsClassification (fr)":26.85,"MasakhaNEWSClassification (fra)":67.94,"MassiveIntentClassification (fr)":15.09,"MassiveScenarioClassification (fr)":21.67,"MTOPDomainClassification (fr)":34.99,"MTOPIntentClassification (fr)":15.76} -{"level_0":56,"index":40,"Rank":57,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.21,"AmazonReviewsClassification (fr)":22.45,"MasakhaNEWSClassification (fra)":55.64,"MassiveIntentClassification (fr)":16.41,"MassiveScenarioClassification (fr)":22.72,"MTOPDomainClassification (fr)":24.27,"MTOPIntentClassification (fr)":9.79} -{"level_0":57,"index":38,"Rank":58,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":24.91,"AmazonReviewsClassification (fr)":24.9,"MasakhaNEWSClassification (fra)":71.14,"MassiveIntentClassification (fr)":6.98,"MassiveScenarioClassification (fr)":11.41,"MTOPDomainClassification (fr)":25.55,"MTOPIntentClassification (fr)":9.49} -{"level_0":58,"index":39,"Rank":59,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":23.28,"AmazonReviewsClassification (fr)":23.52,"MasakhaNEWSClassification (fra)":62.61,"MassiveIntentClassification (fr)":6.24,"MassiveScenarioClassification (fr)":10.98,"MTOPDomainClassification (fr)":27.74,"MTOPIntentClassification (fr)":8.61} -{"level_0":59,"index":11,"Rank":61,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":37.53,"MassiveScenarioClassification (fr)":45.32,"MTOPDomainClassification (fr)":54.97,"MTOPIntentClassification (fr)":26.69} -{"level_0":60,"index":12,"Rank":62,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":32.64,"MassiveScenarioClassification (fr)":40.66,"MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"level_0":61,"index":13,"Rank":63,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":33.16,"MassiveScenarioClassification (fr)":40.92,"MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"level_0":62,"index":20,"Rank":65,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":31.56,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":46.89,"MassiveScenarioClassification (fr)":56.99,"MTOPDomainClassification (fr)":79.8,"MTOPIntentClassification (fr)":38.96} -{"level_0":63,"index":26,"Rank":67,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":26.39,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":43.44,"MassiveScenarioClassification (fr)":45.07,"MTOPDomainClassification (fr)":65.35,"MTOPIntentClassification (fr)":46.33} -{"level_0":64,"index":27,"Rank":68,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":27.4,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":43.67,"MassiveScenarioClassification (fr)":45.92,"MTOPDomainClassification (fr)":63.13,"MTOPIntentClassification (fr)":44.34} -{"level_0":65,"index":28,"Rank":69,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":37.84,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":"","MassiveScenarioClassification (fr)":"","MTOPDomainClassification (fr)":81.32,"MTOPIntentClassification (fr)":58.67} -{"level_0":66,"index":31,"Rank":70,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":39.47,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":64.27,"MassiveScenarioClassification (fr)":69.76,"MTOPDomainClassification (fr)":86.22,"MTOPIntentClassification (fr)":59.43} -{"level_0":67,"index":32,"Rank":71,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":39.29,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":64.58,"MassiveScenarioClassification (fr)":69.6,"MTOPDomainClassification (fr)":83.8,"MTOPIntentClassification (fr)":63.36} -{"level_0":68,"index":33,"Rank":72,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":35.92,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":66.95,"MassiveScenarioClassification (fr)":72.91,"MTOPDomainClassification (fr)":90.98,"MTOPIntentClassification (fr)":69.12} -{"level_0":69,"index":37,"Rank":73,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":34.91,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":58.55,"MassiveScenarioClassification (fr)":63.02,"MTOPDomainClassification (fr)":86.19,"MTOPIntentClassification (fr)":66.75} -{"level_0":70,"index":46,"Rank":74,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":48.51,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":73.32,"MassiveScenarioClassification (fr)":77.07,"MTOPDomainClassification (fr)":89.97,"MTOPIntentClassification (fr)":76.72} -{"level_0":71,"index":59,"Rank":75,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":74.05,"MassiveIntentClassification (fr)":"","MassiveScenarioClassification (fr)":"","MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"level_0":72,"index":61,"Rank":77,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonReviewsClassification (fr)":23.31,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":38.41,"MassiveScenarioClassification (fr)":40.26,"MTOPDomainClassification (fr)":54.61,"MTOPIntentClassification (fr)":34.71} -{"level_0":73,"index":64,"Rank":79,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonReviewsClassification 
(fr)":33.48,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":54.83,"MassiveScenarioClassification (fr)":64.06,"MTOPDomainClassification (fr)":82.48,"MTOPIntentClassification (fr)":46.39} -{"level_0":74,"index":65,"Rank":80,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AmazonReviewsClassification (fr)":35.48,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":57.67,"MassiveScenarioClassification (fr)":66.72,"MTOPDomainClassification (fr)":85.05,"MTOPIntentClassification (fr)":51.07} -{"level_0":75,"index":73,"Rank":81,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":"","MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":30.84,"MassiveScenarioClassification (fr)":42.42,"MTOPDomainClassification (fr)":"","MTOPIntentClassification (fr)":""} -{"level_0":76,"index":74,"Rank":82,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (fr)":33.45,"MasakhaNEWSClassification (fra)":"","MassiveIntentClassification (fr)":58.8,"MassiveScenarioClassification (fr)":63.39,"MTOPDomainClassification (fr)":76.17,"MTOPIntentClassification (fr)":53.26} +{"Rank":1,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.27,"AmazonReviewsClassification (fr)":43.76,"MasakhaNEWSClassification (fra)":81.52,"MassiveIntentClassification (fr)":65.42,"MassiveScenarioClassification (fr)":71.11,"MTOPDomainClassification (fr)":89.38,"MTOPIntentClassification (fr)":64.45} +{"Rank":2,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.61,"AmazonReviewsClassification (fr)":41.59,"MasakhaNEWSClassification (fra)":81.4,"MassiveIntentClassification (fr)":62.83,"MassiveScenarioClassification (fr)":69.71,"MTOPDomainClassification (fr)":90.05,"MTOPIntentClassification (fr)":66.09} +{"Rank":3,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.56,"AmazonReviewsClassification (fr)":43.36,"MasakhaNEWSClassification (fra)":74.81,"MassiveIntentClassification (fr)":68.06,"MassiveScenarioClassification (fr)":74.29,"MTOPDomainClassification (fr)":90.33,"MTOPIntentClassification (fr)":60.52} +{"Rank":4,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"AmazonReviewsClassification (fr)":41.98,"MasakhaNEWSClassification (fra)":76.42,"MassiveIntentClassification (fr)":66.94,"MassiveScenarioClassification (fr)":72.78,"MTOPDomainClassification (fr)":90.12,"MTOPIntentClassification (fr)":62.44} +{"Rank":5,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.44,"AmazonReviewsClassification (fr)":42.15,"MasakhaNEWSClassification (fra)":82.13,"MassiveIntentClassification (fr)":63.08,"MassiveScenarioClassification (fr)":70.15,"MTOPDomainClassification (fr)":87.68,"MTOPIntentClassification (fr)":59.44} +{"Rank":6,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":67.36,"AmazonReviewsClassification (fr)":46.09,"MasakhaNEWSClassification (fra)":79.1,"MassiveIntentClassification (fr)":65.91,"MassiveScenarioClassification (fr)":68.53,"MTOPDomainClassification 
(fr)":86.2,"MTOPIntentClassification (fr)":58.33} +{"Rank":7,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.08,"AmazonReviewsClassification (fr)":41.89,"MasakhaNEWSClassification (fra)":83.06,"MassiveIntentClassification (fr)":62.94,"MassiveScenarioClassification (fr)":67.29,"MTOPDomainClassification (fr)":86.23,"MTOPIntentClassification (fr)":61.07} +{"Rank":8,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.37,"AmazonReviewsClassification (fr)":35.09,"MasakhaNEWSClassification (fra)":72.04,"MassiveIntentClassification (fr)":65.8,"MassiveScenarioClassification (fr)":73.47,"MTOPDomainClassification (fr)":88.19,"MTOPIntentClassification (fr)":63.64} +{"Rank":9,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":65.82,"AmazonReviewsClassification (fr)":37.97,"MasakhaNEWSClassification (fra)":80.62,"MassiveIntentClassification (fr)":62.65,"MassiveScenarioClassification (fr)":69.29,"MTOPDomainClassification (fr)":85.74,"MTOPIntentClassification (fr)":58.62} +{"Rank":10,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":65.09,"AmazonReviewsClassification (fr)":43.52,"MasakhaNEWSClassification (fra)":80.09,"MassiveIntentClassification (fr)":60.99,"MassiveScenarioClassification (fr)":66.42,"MTOPDomainClassification (fr)":85.14,"MTOPIntentClassification (fr)":54.39} +{"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.64,"AmazonReviewsClassification (fr)":39.0,"MasakhaNEWSClassification (fra)":78.1,"MassiveIntentClassification (fr)":61.88,"MassiveScenarioClassification (fr)":67.9,"MTOPDomainClassification (fr)":81.21,"MTOPIntentClassification (fr)":59.76} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":64.6,"AmazonReviewsClassification (fr)":38.52,"MasakhaNEWSClassification (fra)":77.39,"MassiveIntentClassification (fr)":60.47,"MassiveScenarioClassification (fr)":65.1,"MTOPDomainClassification (fr)":84.14,"MTOPIntentClassification (fr)":62.01} +{"Rank":13,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":64.57,"AmazonReviewsClassification (fr)":34.79,"MasakhaNEWSClassification (fra)":79.29,"MassiveIntentClassification (fr)":59.41,"MassiveScenarioClassification (fr)":65.29,"MTOPDomainClassification (fr)":85.52,"MTOPIntentClassification (fr)":63.12} +{"Rank":14,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.41,"AmazonReviewsClassification (fr)":33.51,"MasakhaNEWSClassification (fra)":82.06,"MassiveIntentClassification (fr)":61.19,"MassiveScenarioClassification (fr)":70.22,"MTOPDomainClassification (fr)":85.5,"MTOPIntentClassification (fr)":53.98} +{"Rank":15,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":62.14,"AmazonReviewsClassification (fr)":35.7,"MasakhaNEWSClassification (fra)":76.87,"MassiveIntentClassification (fr)":57.02,"MassiveScenarioClassification (fr)":65.2,"MTOPDomainClassification (fr)":84.61,"MTOPIntentClassification (fr)":53.41} +{"Rank":16,"Model":"sentence-t5-large<\/a>","Model Size (Million 
Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":61.64,"AmazonReviewsClassification (fr)":41.48,"MasakhaNEWSClassification (fra)":80.43,"MassiveIntentClassification (fr)":57.01,"MassiveScenarioClassification (fr)":63.6,"MTOPDomainClassification (fr)":79.6,"MTOPIntentClassification (fr)":47.73} +{"Rank":17,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.3,"AmazonReviewsClassification (fr)":38.6,"MasakhaNEWSClassification (fra)":82.58,"MassiveIntentClassification (fr)":56.31,"MassiveScenarioClassification (fr)":59.5,"MTOPDomainClassification (fr)":80.79,"MTOPIntentClassification (fr)":50.01} +{"Rank":18,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.02,"AmazonReviewsClassification (fr)":35.3,"MasakhaNEWSClassification (fra)":76.09,"MassiveIntentClassification (fr)":57.52,"MassiveScenarioClassification (fr)":64.52,"MTOPDomainClassification (fr)":78.63,"MTOPIntentClassification (fr)":54.05} +{"Rank":19,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.84,"AmazonReviewsClassification (fr)":37.26,"MasakhaNEWSClassification (fra)":80.19,"MassiveIntentClassification (fr)":53.7,"MassiveScenarioClassification (fr)":62.46,"MTOPDomainClassification (fr)":79.79,"MTOPIntentClassification (fr)":45.62} +{"Rank":20,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":58.08,"AmazonReviewsClassification (fr)":37.35,"MasakhaNEWSClassification (fra)":81.21,"MassiveIntentClassification (fr)":51.13,"MassiveScenarioClassification (fr)":59.92,"MTOPDomainClassification (fr)":75.03,"MTOPIntentClassification (fr)":43.85} +{"Rank":21,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":57.72,"AmazonReviewsClassification (fr)":36.71,"MasakhaNEWSClassification (fra)":80.59,"MassiveIntentClassification (fr)":46.39,"MassiveScenarioClassification (fr)":53.86,"MTOPDomainClassification (fr)":74.8,"MTOPIntentClassification (fr)":53.97} +{"Rank":22,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.63,"AmazonReviewsClassification (fr)":36.03,"MasakhaNEWSClassification (fra)":70.36,"MassiveIntentClassification (fr)":51.59,"MassiveScenarioClassification (fr)":61.28,"MTOPDomainClassification (fr)":77.1,"MTOPIntentClassification (fr)":43.44} +{"Rank":23,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.78,"AmazonReviewsClassification (fr)":34.25,"MasakhaNEWSClassification (fra)":73.84,"MassiveIntentClassification (fr)":51.93,"MassiveScenarioClassification (fr)":58.31,"MTOPDomainClassification (fr)":71.83,"MTOPIntentClassification (fr)":44.53} +{"Rank":24,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.9,"AmazonReviewsClassification (fr)":35.12,"MasakhaNEWSClassification (fra)":80.83,"MassiveIntentClassification (fr)":43.21,"MassiveScenarioClassification (fr)":49.78,"MTOPDomainClassification (fr)":69.24,"MTOPIntentClassification (fr)":51.25} +{"Rank":25,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":53.31,"AmazonReviewsClassification (fr)":31.12,"MasakhaNEWSClassification (fra)":65.9,"MassiveIntentClassification 
(fr)":46.13,"MassiveScenarioClassification (fr)":54.32,"MTOPDomainClassification (fr)":72.26,"MTOPIntentClassification (fr)":50.12} +{"Rank":26,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":52.14,"AmazonReviewsClassification (fr)":27.54,"MasakhaNEWSClassification (fra)":72.2,"MassiveIntentClassification (fr)":44.82,"MassiveScenarioClassification (fr)":53.76,"MTOPDomainClassification (fr)":75.59,"MTOPIntentClassification (fr)":38.94} +{"Rank":27,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":50.9,"AmazonReviewsClassification (fr)":27.05,"MasakhaNEWSClassification (fra)":75.62,"MassiveIntentClassification (fr)":42.64,"MassiveScenarioClassification (fr)":49.92,"MTOPDomainClassification (fr)":72.97,"MTOPIntentClassification (fr)":37.18} +{"Rank":28,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":48.37,"AmazonReviewsClassification (fr)":29.02,"MasakhaNEWSClassification (fra)":75.69,"MassiveIntentClassification (fr)":38.01,"MassiveScenarioClassification (fr)":43.63,"MTOPDomainClassification (fr)":64.49,"MTOPIntentClassification (fr)":39.4} +{"Rank":29,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":46.1,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":64.0,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.61,"MTOPIntentClassification (fr)":37.84} +{"Rank":30,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.1,"AmazonReviewsClassification (fr)":29.38,"MasakhaNEWSClassification (fra)":63.93,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.5,"MTOPDomainClassification (fr)":63.65,"MTOPIntentClassification (fr)":37.87} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.39,"MasakhaNEWSClassification (fra)":63.91,"MassiveIntentClassification (fr)":37.3,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.63,"MTOPIntentClassification (fr)":37.86} +{"Rank":32,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":46.09,"AmazonReviewsClassification (fr)":29.35,"MasakhaNEWSClassification (fra)":63.89,"MassiveIntentClassification (fr)":37.28,"MassiveScenarioClassification (fr)":44.47,"MTOPDomainClassification (fr)":63.7,"MTOPIntentClassification (fr)":37.85} +{"Rank":33,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":31.21,"AmazonReviewsClassification (fr)":26.75,"MasakhaNEWSClassification (fra)":60.5,"MassiveIntentClassification (fr)":13.58,"MassiveScenarioClassification (fr)":23.21,"MTOPDomainClassification (fr)":43.83,"MTOPIntentClassification (fr)":19.38} +{"Rank":34,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.71,"AmazonReviewsClassification (fr)":26.62,"MasakhaNEWSClassification (fra)":65.76,"MassiveIntentClassification (fr)":15.82,"MassiveScenarioClassification (fr)":23.92,"MTOPDomainClassification (fr)":36.77,"MTOPIntentClassification (fr)":15.37} 
+{"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":30.38,"AmazonReviewsClassification (fr)":26.85,"MasakhaNEWSClassification (fra)":67.94,"MassiveIntentClassification (fr)":15.09,"MassiveScenarioClassification (fr)":21.67,"MTOPDomainClassification (fr)":34.99,"MTOPIntentClassification (fr)":15.76} +{"Rank":36,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.21,"AmazonReviewsClassification (fr)":22.45,"MasakhaNEWSClassification (fra)":55.64,"MassiveIntentClassification (fr)":16.41,"MassiveScenarioClassification (fr)":22.72,"MTOPDomainClassification (fr)":24.27,"MTOPIntentClassification (fr)":9.79} +{"Rank":37,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":24.91,"AmazonReviewsClassification (fr)":24.9,"MasakhaNEWSClassification (fra)":71.14,"MassiveIntentClassification (fr)":6.98,"MassiveScenarioClassification (fr)":11.41,"MTOPDomainClassification (fr)":25.55,"MTOPIntentClassification (fr)":9.49} +{"Rank":38,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":23.28,"AmazonReviewsClassification (fr)":23.52,"MasakhaNEWSClassification (fra)":62.61,"MassiveIntentClassification (fr)":6.24,"MassiveScenarioClassification (fr)":10.98,"MTOPDomainClassification (fr)":27.74,"MTOPIntentClassification (fr)":8.61} +{"Rank":39,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":40,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":41,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":42,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":43,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":44,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification 
(fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":45,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":46,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"AmazonReviewsClassification (fr)":40.94,"MasakhaNEWSClassification (fra)":79.69,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":84.79,"MTOPIntentClassification (fr)":55.51} +{"Rank":47,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AmazonReviewsClassification (fr)":41.91,"MasakhaNEWSClassification (fra)":79.38,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":86.41,"MTOPIntentClassification (fr)":59.43} +{"Rank":48,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"AmazonReviewsClassification (fr)":39.68,"MasakhaNEWSClassification (fra)":77.65,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":81.2,"MTOPIntentClassification (fr)":46.01} +{"Rank":49,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":74.05,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AmazonReviewsClassification (fr)":23.31,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":38.41,"MassiveScenarioClassification (fr)":40.26,"MTOPDomainClassification (fr)":54.61,"MTOPIntentClassification (fr)":34.71} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AmazonReviewsClassification (fr)":33.48,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":54.83,"MassiveScenarioClassification (fr)":64.06,"MTOPDomainClassification (fr)":82.48,"MTOPIntentClassification (fr)":46.39} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory 
Usage (GB, fp32)":4.62,"Average":null,"AmazonReviewsClassification (fr)":35.48,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":57.67,"MassiveScenarioClassification (fr)":66.72,"MTOPDomainClassification (fr)":85.05,"MTOPIntentClassification (fr)":51.07} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AmazonReviewsClassification (fr)":null,"MasakhaNEWSClassification (fra)":null,"MassiveIntentClassification (fr)":null,"MassiveScenarioClassification (fr)":null,"MTOPDomainClassification (fr)":null,"MTOPIntentClassification (fr)":null} diff --git a/boards_data/fr/data_tasks/Clustering/default.jsonl b/boards_data/fr/data_tasks/Clustering/default.jsonl index f3fd8c010529c23d5098f163cf81a7189220d1aa..d0ea9ac88d9fb3a9ba472f51e055c20b09b22807 100644 --- a/boards_data/fr/data_tasks/Clustering/default.jsonl +++ b/boards_data/fr/data_tasks/Clustering/default.jsonl @@ -1,60 +1,57 @@ -{"level_0":0,"index":10,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.48,"AlloProfClusteringP2P":71.2,"AlloProfClusteringS2S":59.64,"HALClusteringS2S":28.19,"MLSUMClusteringP2P (fr)":47.75,"MLSUMClusteringS2S (fr)":47.46,"MasakhaNEWSClusteringP2P (fra)":73.86,"MasakhaNEWSClusteringS2S (fra)":67.24} -{"level_0":1,"index":9,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":55.56,"AlloProfClusteringP2P":76.06,"AlloProfClusteringS2S":64.52,"HALClusteringS2S":30.83,"MLSUMClusteringP2P (fr)":50.03,"MLSUMClusteringS2S (fr)":43.7,"MasakhaNEWSClusteringP2P (fra)":60.19,"MasakhaNEWSClusteringS2S (fra)":63.62} -{"level_0":2,"index":8,"Rank":3,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.01,"AlloProfClusteringP2P":70.55,"AlloProfClusteringS2S":55.42,"HALClusteringS2S":28.3,"MLSUMClusteringP2P (fr)":45.27,"MLSUMClusteringS2S (fr)":42.77,"MasakhaNEWSClusteringP2P (fra)":71.04,"MasakhaNEWSClusteringS2S (fra)":71.71} -{"level_0":3,"index":22,"Rank":4,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.26,"AlloProfClusteringP2P":65.3,"AlloProfClusteringS2S":55.37,"HALClusteringS2S":26.27,"MLSUMClusteringP2P (fr)":42.6,"MLSUMClusteringS2S (fr)":42.92,"MasakhaNEWSClusteringP2P (fra)":71.29,"MasakhaNEWSClusteringS2S (fra)":55.09} -{"level_0":4,"index":81,"Rank":5,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.67,"AlloProfClusteringP2P":64.83,"AlloProfClusteringS2S":53.52,"HALClusteringS2S":26.18,"MLSUMClusteringP2P (fr)":44.59,"MLSUMClusteringS2S 
(fr)":41.67,"MasakhaNEWSClusteringP2P (fra)":68.35,"MasakhaNEWSClusteringS2S (fra)":48.58} -{"level_0":5,"index":21,"Rank":6,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.45,"AlloProfClusteringP2P":64.71,"AlloProfClusteringS2S":45.57,"HALClusteringS2S":25.37,"MLSUMClusteringP2P (fr)":44.23,"MLSUMClusteringS2S (fr)":44.58,"MasakhaNEWSClusteringP2P (fra)":61.58,"MasakhaNEWSClusteringS2S (fra)":46.1} -{"level_0":6,"index":4,"Rank":7,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.57,"AlloProfClusteringP2P":65.37,"AlloProfClusteringS2S":47.03,"HALClusteringS2S":27.67,"MLSUMClusteringP2P (fr)":45.99,"MLSUMClusteringS2S (fr)":45.57,"MasakhaNEWSClusteringP2P (fra)":44.53,"MasakhaNEWSClusteringS2S (fra)":49.8} -{"level_0":7,"index":24,"Rank":8,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.25,"AlloProfClusteringP2P":62.17,"AlloProfClusteringS2S":45.12,"HALClusteringS2S":23.56,"MLSUMClusteringP2P (fr)":43.3,"MLSUMClusteringS2S (fr)":42.77,"MasakhaNEWSClusteringP2P (fra)":52.88,"MasakhaNEWSClusteringS2S (fra)":53.93} -{"level_0":8,"index":2,"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.46,"AlloProfClusteringP2P":61.63,"AlloProfClusteringS2S":50.67,"HALClusteringS2S":27.44,"MLSUMClusteringP2P (fr)":45.23,"MLSUMClusteringS2S (fr)":41.48,"MasakhaNEWSClusteringP2P (fra)":56.59,"MasakhaNEWSClusteringS2S (fra)":35.18} -{"level_0":9,"index":0,"Rank":10,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.74,"AlloProfClusteringP2P":62.01,"AlloProfClusteringS2S":49.2,"HALClusteringS2S":26.17,"MLSUMClusteringP2P (fr)":45.28,"MLSUMClusteringS2S (fr)":42.74,"MasakhaNEWSClusteringP2P (fra)":48.13,"MasakhaNEWSClusteringS2S (fra)":39.62} -{"level_0":10,"index":3,"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.23,"AlloProfClusteringP2P":62.5,"AlloProfClusteringS2S":44.28,"HALClusteringS2S":26.36,"MLSUMClusteringP2P (fr)":44.03,"MLSUMClusteringS2S (fr)":42.95,"MasakhaNEWSClusteringP2P (fra)":50.68,"MasakhaNEWSClusteringS2S (fra)":38.79} -{"level_0":11,"index":72,"Rank":12,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.25,"AlloProfClusteringP2P":60.98,"AlloProfClusteringS2S":43.5,"HALClusteringS2S":21.4,"MLSUMClusteringP2P (fr)":42.24,"MLSUMClusteringS2S (fr)":35.25,"MasakhaNEWSClusteringP2P (fra)":61.15,"MasakhaNEWSClusteringS2S (fra)":38.24} -{"level_0":12,"index":36,"Rank":13,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":43.17,"AlloProfClusteringP2P":62.69,"AlloProfClusteringS2S":42.06,"HALClusteringS2S":23.9,"MLSUMClusteringP2P (fr)":42.04,"MLSUMClusteringS2S (fr)":32.29,"MasakhaNEWSClusteringP2P (fra)":54.51,"MasakhaNEWSClusteringS2S (fra)":44.73} -{"level_0":13,"index":44,"Rank":14,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.66,"AlloProfClusteringP2P":62.09,"AlloProfClusteringS2S":32.98,"HALClusteringS2S":22.48,"MLSUMClusteringP2P (fr)":43.48,"MLSUMClusteringS2S (fr)":38.53,"MasakhaNEWSClusteringP2P (fra)":47.91,"MasakhaNEWSClusteringS2S (fra)":51.16} 
-{"level_0":14,"index":76,"Rank":15,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.0,"AlloProfClusteringP2P":56.9,"AlloProfClusteringS2S":37.84,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.9,"MLSUMClusteringS2S (fr)":35.5,"MasakhaNEWSClusteringP2P (fra)":60.57,"MasakhaNEWSClusteringS2S (fra)":40.31} -{"level_0":15,"index":1,"Rank":16,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.88,"AlloProfClusteringP2P":57.96,"AlloProfClusteringS2S":41.65,"HALClusteringS2S":24.84,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":38.77,"MasakhaNEWSClusteringP2P (fra)":48.54,"MasakhaNEWSClusteringS2S (fra)":36.33} -{"level_0":16,"index":54,"Rank":17,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.82,"AlloProfClusteringP2P":57.43,"AlloProfClusteringS2S":39.09,"HALClusteringS2S":25.77,"MLSUMClusteringP2P (fr)":42.03,"MLSUMClusteringS2S (fr)":41.83,"MasakhaNEWSClusteringP2P (fra)":49.68,"MasakhaNEWSClusteringS2S (fra)":36.91} -{"level_0":17,"index":30,"Rank":18,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":41.7,"AlloProfClusteringP2P":64.12,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":23.4,"MLSUMClusteringP2P (fr)":42.94,"MLSUMClusteringS2S (fr)":33.91,"MasakhaNEWSClusteringP2P (fra)":53.94,"MasakhaNEWSClusteringS2S (fra)":41.05} -{"level_0":18,"index":71,"Rank":19,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":41.6,"AlloProfClusteringP2P":60.37,"AlloProfClusteringS2S":40.76,"HALClusteringS2S":20.28,"MLSUMClusteringP2P (fr)":41.61,"MLSUMClusteringS2S (fr)":33.6,"MasakhaNEWSClusteringP2P (fra)":62.82,"MasakhaNEWSClusteringS2S (fra)":31.74} -{"level_0":19,"index":51,"Rank":20,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.33,"AlloProfClusteringP2P":59.89,"AlloProfClusteringS2S":38.46,"HALClusteringS2S":25.68,"MLSUMClusteringP2P (fr)":44.01,"MLSUMClusteringS2S (fr)":36.92,"MasakhaNEWSClusteringP2P (fra)":47.22,"MasakhaNEWSClusteringS2S (fra)":37.16} -{"level_0":20,"index":7,"Rank":21,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.21,"AlloProfClusteringP2P":64.0,"AlloProfClusteringS2S":29.93,"HALClusteringS2S":20.82,"MLSUMClusteringP2P (fr)":45.26,"MLSUMClusteringS2S (fr)":44.95,"MasakhaNEWSClusteringP2P (fra)":51.34,"MasakhaNEWSClusteringS2S (fra)":32.2} -{"level_0":21,"index":43,"Rank":22,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":41.16,"AlloProfClusteringP2P":61.06,"AlloProfClusteringS2S":28.12,"HALClusteringS2S":19.69,"MLSUMClusteringP2P (fr)":45.59,"MLSUMClusteringS2S (fr)":32.0,"MasakhaNEWSClusteringP2P (fra)":52.47,"MasakhaNEWSClusteringS2S (fra)":49.2} -{"level_0":22,"index":15,"Rank":23,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.7,"AlloProfClusteringP2P":63.53,"AlloProfClusteringS2S":36.18,"HALClusteringS2S":19.9,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":34.75,"MasakhaNEWSClusteringP2P (fra)":53.18,"MasakhaNEWSClusteringS2S (fra)":32.31} -{"level_0":23,"index":70,"Rank":24,"Model":"sentence-t5-large<\/a>","Model 
Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.6,"AlloProfClusteringP2P":61.82,"AlloProfClusteringS2S":39.78,"HALClusteringS2S":18.73,"MLSUMClusteringP2P (fr)":42.07,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":58.6,"MasakhaNEWSClusteringS2S (fra)":31.33} -{"level_0":24,"index":42,"Rank":25,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.43,"AlloProfClusteringP2P":60.66,"AlloProfClusteringS2S":35.05,"HALClusteringS2S":20.9,"MLSUMClusteringP2P (fr)":43.5,"MLSUMClusteringS2S (fr)":30.99,"MasakhaNEWSClusteringP2P (fra)":49.71,"MasakhaNEWSClusteringS2S (fra)":42.23} -{"level_0":25,"index":53,"Rank":26,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.4,"AlloProfClusteringP2P":56.73,"AlloProfClusteringS2S":37.62,"HALClusteringS2S":25.76,"MLSUMClusteringP2P (fr)":41.82,"MLSUMClusteringS2S (fr)":41.83,"MasakhaNEWSClusteringP2P (fra)":42.63,"MasakhaNEWSClusteringS2S (fra)":36.4} -{"level_0":26,"index":69,"Rank":27,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.07,"AlloProfClusteringP2P":58.44,"AlloProfClusteringS2S":35.93,"HALClusteringS2S":17.72,"MLSUMClusteringP2P (fr)":40.77,"MLSUMClusteringS2S (fr)":30.06,"MasakhaNEWSClusteringP2P (fra)":61.9,"MasakhaNEWSClusteringS2S (fra)":35.64} -{"level_0":27,"index":80,"Rank":28,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.86,"AlloProfClusteringP2P":56.89,"AlloProfClusteringS2S":38.2,"HALClusteringS2S":24.5,"MLSUMClusteringP2P (fr)":41.79,"MLSUMClusteringS2S (fr)":41.55,"MasakhaNEWSClusteringP2P (fra)":49.18,"MasakhaNEWSClusteringS2S (fra)":26.94} -{"level_0":28,"index":52,"Rank":29,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.81,"AlloProfClusteringP2P":59.15,"AlloProfClusteringS2S":36.45,"HALClusteringS2S":24.97,"MLSUMClusteringP2P (fr)":42.49,"MLSUMClusteringS2S (fr)":34.45,"MasakhaNEWSClusteringP2P (fra)":47.58,"MasakhaNEWSClusteringS2S (fra)":33.58} -{"level_0":29,"index":29,"Rank":30,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.65,"AlloProfClusteringP2P":64.17,"AlloProfClusteringS2S":38.17,"HALClusteringS2S":24.09,"MLSUMClusteringP2P (fr)":43.8,"MLSUMClusteringS2S (fr)":37.75,"MasakhaNEWSClusteringP2P (fra)":40.8,"MasakhaNEWSClusteringS2S (fra)":28.8} -{"level_0":30,"index":68,"Rank":31,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.11,"AlloProfClusteringP2P":54.49,"AlloProfClusteringS2S":44.79,"HALClusteringS2S":23.97,"MLSUMClusteringP2P (fr)":40.55,"MLSUMClusteringS2S (fr)":37.53,"MasakhaNEWSClusteringP2P (fra)":41.57,"MasakhaNEWSClusteringS2S (fra)":30.88} -{"level_0":31,"index":47,"Rank":32,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.93,"AlloProfClusteringP2P":60.89,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.2,"MLSUMClusteringS2S (fr)":37.61,"MasakhaNEWSClusteringP2P (fra)":40.12,"MasakhaNEWSClusteringS2S (fra)":39.22} -{"level_0":32,"index":14,"Rank":33,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Average":38.91,"AlloProfClusteringP2P":61.96,"AlloProfClusteringS2S":31.36,"HALClusteringS2S":17.31,"MLSUMClusteringP2P (fr)":42.8,"MLSUMClusteringS2S (fr)":32.72,"MasakhaNEWSClusteringP2P (fra)":56.81,"MasakhaNEWSClusteringS2S (fra)":29.41} -{"level_0":33,"index":45,"Rank":34,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":38.7,"AlloProfClusteringP2P":62.99,"AlloProfClusteringS2S":32.26,"HALClusteringS2S":22.44,"MLSUMClusteringP2P (fr)":44.04,"MLSUMClusteringS2S (fr)":37.65,"MasakhaNEWSClusteringP2P (fra)":40.94,"MasakhaNEWSClusteringS2S (fra)":30.56} -{"level_0":34,"index":63,"Rank":35,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":38.7,"AlloProfClusteringP2P":55.95,"AlloProfClusteringS2S":35.39,"HALClusteringS2S":18.2,"MLSUMClusteringP2P (fr)":40.17,"MLSUMClusteringS2S (fr)":34.65,"MasakhaNEWSClusteringP2P (fra)":53.76,"MasakhaNEWSClusteringS2S (fra)":32.76} -{"level_0":35,"index":23,"Rank":36,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.61,"AlloProfClusteringP2P":55.52,"AlloProfClusteringS2S":35.8,"HALClusteringS2S":23.14,"MLSUMClusteringP2P (fr)":40.31,"MLSUMClusteringS2S (fr)":40.05,"MasakhaNEWSClusteringP2P (fra)":45.03,"MasakhaNEWSClusteringS2S (fra)":30.39} -{"level_0":36,"index":67,"Rank":37,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.35,"AlloProfClusteringP2P":56.06,"AlloProfClusteringS2S":42.16,"HALClusteringS2S":23.21,"MLSUMClusteringP2P (fr)":39.97,"MLSUMClusteringS2S (fr)":36.55,"MasakhaNEWSClusteringP2P (fra)":36.58,"MasakhaNEWSClusteringS2S (fra)":33.9} -{"level_0":37,"index":57,"Rank":38,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":38.32,"AlloProfClusteringP2P":54.78,"AlloProfClusteringS2S":31.6,"HALClusteringS2S":20.62,"MLSUMClusteringP2P (fr)":42.09,"MLSUMClusteringS2S (fr)":34.84,"MasakhaNEWSClusteringP2P (fra)":46.16,"MasakhaNEWSClusteringS2S (fra)":38.13} -{"level_0":38,"index":50,"Rank":39,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.9,"AlloProfClusteringP2P":56.73,"AlloProfClusteringS2S":38.2,"HALClusteringS2S":24.13,"MLSUMClusteringP2P (fr)":42.12,"MLSUMClusteringS2S (fr)":36.69,"MasakhaNEWSClusteringP2P (fra)":34.61,"MasakhaNEWSClusteringS2S (fra)":32.81} -{"level_0":39,"index":77,"Rank":40,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.35,"AlloProfClusteringP2P":54.21,"AlloProfClusteringS2S":37.95,"HALClusteringS2S":18.94,"MLSUMClusteringP2P (fr)":41.02,"MLSUMClusteringS2S (fr)":37.97,"MasakhaNEWSClusteringP2P (fra)":24.09,"MasakhaNEWSClusteringS2S (fra)":40.24} -{"level_0":40,"index":35,"Rank":41,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":35.44,"AlloProfClusteringP2P":59.09,"AlloProfClusteringS2S":38.92,"HALClusteringS2S":20.22,"MLSUMClusteringP2P (fr)":35.98,"MLSUMClusteringS2S (fr)":27.05,"MasakhaNEWSClusteringP2P (fra)":36.03,"MasakhaNEWSClusteringS2S (fra)":30.77} -{"level_0":41,"index":48,"Rank":42,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":35.04,"AlloProfClusteringP2P":62.22,"AlloProfClusteringS2S":27.06,"HALClusteringS2S":13.86,"MLSUMClusteringP2P (fr)":44.11,"MLSUMClusteringS2S (fr)":30.47,"MasakhaNEWSClusteringP2P (fra)":40.2,"MasakhaNEWSClusteringS2S (fra)":27.35} -{"level_0":42,"index":59,"Rank":43,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":34.73,"AlloProfClusteringP2P":51.83,"AlloProfClusteringS2S":32.07,"HALClusteringS2S":18.84,"MLSUMClusteringP2P (fr)":36.74,"MLSUMClusteringS2S (fr)":28.12,"MasakhaNEWSClusteringP2P (fra)":34.92,"MasakhaNEWSClusteringS2S (fra)":40.58} -{"level_0":43,"index":17,"Rank":44,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":34.09,"AlloProfClusteringP2P":53.16,"AlloProfClusteringS2S":43.43,"HALClusteringS2S":20.26,"MLSUMClusteringP2P (fr)":41.22,"MLSUMClusteringS2S (fr)":31.88,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"level_0":44,"index":18,"Rank":45,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.96,"AlloProfClusteringP2P":53.49,"AlloProfClusteringS2S":43.1,"HALClusteringS2S":19.78,"MLSUMClusteringP2P (fr)":40.73,"MLSUMClusteringS2S (fr)":31.94,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"level_0":45,"index":16,"Rank":46,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.94,"AlloProfClusteringP2P":53.22,"AlloProfClusteringS2S":42.92,"HALClusteringS2S":19.94,"MLSUMClusteringP2P (fr)":40.96,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"level_0":46,"index":41,"Rank":47,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":33.82,"AlloProfClusteringP2P":51.5,"AlloProfClusteringS2S":43.06,"HALClusteringS2S":20.81,"MLSUMClusteringP2P (fr)":40.9,"MLSUMClusteringS2S (fr)":31.8,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} -{"level_0":47,"index":58,"Rank":48,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":33.75,"AlloProfClusteringP2P":46.03,"AlloProfClusteringS2S":31.83,"HALClusteringS2S":19.58,"MLSUMClusteringP2P (fr)":34.35,"MLSUMClusteringS2S (fr)":29.3,"MasakhaNEWSClusteringP2P (fra)":42.72,"MasakhaNEWSClusteringS2S (fra)":32.47} -{"level_0":48,"index":75,"Rank":49,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.66,"AlloProfClusteringP2P":49.11,"AlloProfClusteringS2S":32.72,"HALClusteringS2S":16.19,"MLSUMClusteringP2P (fr)":36.19,"MLSUMClusteringS2S (fr)":30.39,"MasakhaNEWSClusteringP2P (fra)":38.51,"MasakhaNEWSClusteringS2S (fra)":32.51} -{"level_0":49,"index":66,"Rank":50,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":32.84,"AlloProfClusteringP2P":49.13,"AlloProfClusteringS2S":26.16,"HALClusteringS2S":12.49,"MLSUMClusteringP2P (fr)":35.15,"MLSUMClusteringS2S (fr)":25.95,"MasakhaNEWSClusteringP2P (fra)":53.73,"MasakhaNEWSClusteringS2S (fra)":27.27} -{"level_0":50,"index":5,"Rank":51,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, 
fp32)":0.16,"Average":29.92,"AlloProfClusteringP2P":48.45,"AlloProfClusteringS2S":25.81,"HALClusteringS2S":11.52,"MLSUMClusteringP2P (fr)":34.53,"MLSUMClusteringS2S (fr)":27.35,"MasakhaNEWSClusteringP2P (fra)":32.04,"MasakhaNEWSClusteringS2S (fra)":29.77} -{"level_0":51,"index":49,"Rank":52,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.03,"AlloProfClusteringP2P":53.57,"AlloProfClusteringS2S":22.13,"HALClusteringS2S":7.68,"MLSUMClusteringP2P (fr)":36.43,"MLSUMClusteringS2S (fr)":25.26,"MasakhaNEWSClusteringP2P (fra)":37.57,"MasakhaNEWSClusteringS2S (fra)":20.58} -{"level_0":52,"index":79,"Rank":53,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":28.62,"AlloProfClusteringP2P":56.54,"AlloProfClusteringS2S":21.18,"HALClusteringS2S":5.94,"MLSUMClusteringP2P (fr)":42.67,"MLSUMClusteringS2S (fr)":18.5,"MasakhaNEWSClusteringP2P (fra)":34.02,"MasakhaNEWSClusteringS2S (fra)":21.52} -{"level_0":53,"index":78,"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":28.42,"AlloProfClusteringP2P":52.24,"AlloProfClusteringS2S":20.37,"HALClusteringS2S":8.68,"MLSUMClusteringP2P (fr)":40.44,"MLSUMClusteringS2S (fr)":24.14,"MasakhaNEWSClusteringP2P (fra)":29.29,"MasakhaNEWSClusteringS2S (fra)":23.76} -{"level_0":54,"index":38,"Rank":55,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":27.18,"AlloProfClusteringP2P":52.86,"AlloProfClusteringS2S":14.46,"HALClusteringS2S":3.85,"MLSUMClusteringP2P (fr)":39.06,"MLSUMClusteringS2S (fr)":17.13,"MasakhaNEWSClusteringP2P (fra)":41.61,"MasakhaNEWSClusteringS2S (fra)":21.26} -{"level_0":55,"index":40,"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.11,"AlloProfClusteringP2P":40.85,"AlloProfClusteringS2S":21.76,"HALClusteringS2S":5.26,"MLSUMClusteringP2P (fr)":38.09,"MLSUMClusteringS2S (fr)":18.71,"MasakhaNEWSClusteringP2P (fra)":26.43,"MasakhaNEWSClusteringS2S (fra)":24.68} -{"level_0":56,"index":39,"Rank":57,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":22.45,"AlloProfClusteringP2P":43.2,"AlloProfClusteringS2S":12.94,"HALClusteringS2S":1.8,"MLSUMClusteringP2P (fr)":33.22,"MLSUMClusteringS2S (fr)":14.9,"MasakhaNEWSClusteringP2P (fra)":28.49,"MasakhaNEWSClusteringS2S (fra)":22.58} -{"level_0":57,"index":34,"Rank":71,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloProfClusteringP2P":76.06,"AlloProfClusteringS2S":"","HALClusteringS2S":30.83,"MLSUMClusteringP2P (fr)":50.03,"MLSUMClusteringS2S (fr)":"","MasakhaNEWSClusteringP2P (fra)":60.19,"MasakhaNEWSClusteringS2S (fra)":""} -{"level_0":58,"index":55,"Rank":74,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloProfClusteringP2P":70.55,"AlloProfClusteringS2S":"","HALClusteringS2S":28.3,"MLSUMClusteringP2P (fr)":45.27,"MLSUMClusteringS2S (fr)":"","MasakhaNEWSClusteringP2P (fra)":71.04,"MasakhaNEWSClusteringS2S (fra)":""} -{"level_0":59,"index":56,"Rank":75,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AlloProfClusteringP2P":76.06,"AlloProfClusteringS2S":"","HALClusteringS2S":30.83,"MLSUMClusteringP2P (fr)":50.03,"MLSUMClusteringS2S (fr)":"","MasakhaNEWSClusteringP2P (fra)":60.19,"MasakhaNEWSClusteringS2S (fra)":""} +{"Rank":1,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.67,"AlloProfClusteringP2P":64.83,"AlloProfClusteringS2S":53.52,"HALClusteringS2S":26.18,"MLSUMClusteringP2P (fr)":44.59,"MLSUMClusteringS2S (fr)":41.67,"MasakhaNEWSClusteringP2P (fra)":68.35,"MasakhaNEWSClusteringS2S (fra)":48.58} +{"Rank":2,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.57,"AlloProfClusteringP2P":65.37,"AlloProfClusteringS2S":47.03,"HALClusteringS2S":27.67,"MLSUMClusteringP2P (fr)":45.99,"MLSUMClusteringS2S (fr)":45.57,"MasakhaNEWSClusteringP2P (fra)":44.53,"MasakhaNEWSClusteringS2S (fra)":49.8} +{"Rank":3,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.46,"AlloProfClusteringP2P":61.63,"AlloProfClusteringS2S":50.67,"HALClusteringS2S":27.44,"MLSUMClusteringP2P (fr)":45.23,"MLSUMClusteringS2S (fr)":41.48,"MasakhaNEWSClusteringP2P (fra)":56.59,"MasakhaNEWSClusteringS2S (fra)":35.18} +{"Rank":4,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.74,"AlloProfClusteringP2P":62.01,"AlloProfClusteringS2S":49.2,"HALClusteringS2S":26.17,"MLSUMClusteringP2P (fr)":45.28,"MLSUMClusteringS2S (fr)":42.74,"MasakhaNEWSClusteringP2P (fra)":48.13,"MasakhaNEWSClusteringS2S (fra)":39.62} +{"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.23,"AlloProfClusteringP2P":62.5,"AlloProfClusteringS2S":44.28,"HALClusteringS2S":26.36,"MLSUMClusteringP2P (fr)":44.03,"MLSUMClusteringS2S (fr)":42.95,"MasakhaNEWSClusteringP2P (fra)":50.68,"MasakhaNEWSClusteringS2S (fra)":38.79} +{"Rank":6,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.25,"AlloProfClusteringP2P":60.98,"AlloProfClusteringS2S":43.5,"HALClusteringS2S":21.4,"MLSUMClusteringP2P (fr)":42.24,"MLSUMClusteringS2S (fr)":35.25,"MasakhaNEWSClusteringP2P (fra)":61.15,"MasakhaNEWSClusteringS2S (fra)":38.24} +{"Rank":7,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":43.17,"AlloProfClusteringP2P":62.69,"AlloProfClusteringS2S":42.06,"HALClusteringS2S":23.9,"MLSUMClusteringP2P (fr)":42.04,"MLSUMClusteringS2S (fr)":32.29,"MasakhaNEWSClusteringP2P (fra)":54.51,"MasakhaNEWSClusteringS2S (fra)":44.73} +{"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.66,"AlloProfClusteringP2P":62.09,"AlloProfClusteringS2S":32.98,"HALClusteringS2S":22.48,"MLSUMClusteringP2P (fr)":43.48,"MLSUMClusteringS2S (fr)":38.53,"MasakhaNEWSClusteringP2P (fra)":47.91,"MasakhaNEWSClusteringS2S (fra)":51.16} +{"Rank":9,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.0,"AlloProfClusteringP2P":56.9,"AlloProfClusteringS2S":37.84,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.9,"MLSUMClusteringS2S (fr)":35.5,"MasakhaNEWSClusteringP2P (fra)":60.57,"MasakhaNEWSClusteringS2S (fra)":40.31} +{"Rank":10,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":41.88,"AlloProfClusteringP2P":57.96,"AlloProfClusteringS2S":41.65,"HALClusteringS2S":24.84,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":38.77,"MasakhaNEWSClusteringP2P (fra)":48.54,"MasakhaNEWSClusteringS2S (fra)":36.33} +{"Rank":11,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":41.7,"AlloProfClusteringP2P":64.12,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":23.4,"MLSUMClusteringP2P (fr)":42.94,"MLSUMClusteringS2S (fr)":33.91,"MasakhaNEWSClusteringP2P (fra)":53.94,"MasakhaNEWSClusteringS2S (fra)":41.05} +{"Rank":12,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":41.6,"AlloProfClusteringP2P":60.37,"AlloProfClusteringS2S":40.76,"HALClusteringS2S":20.28,"MLSUMClusteringP2P (fr)":41.61,"MLSUMClusteringS2S (fr)":33.6,"MasakhaNEWSClusteringP2P (fra)":62.82,"MasakhaNEWSClusteringS2S (fra)":31.74} +{"Rank":13,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":41.16,"AlloProfClusteringP2P":61.06,"AlloProfClusteringS2S":28.12,"HALClusteringS2S":19.69,"MLSUMClusteringP2P (fr)":45.59,"MLSUMClusteringS2S (fr)":32.0,"MasakhaNEWSClusteringP2P (fra)":52.47,"MasakhaNEWSClusteringS2S (fra)":49.2} +{"Rank":14,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.7,"AlloProfClusteringP2P":63.53,"AlloProfClusteringS2S":36.18,"HALClusteringS2S":19.9,"MLSUMClusteringP2P (fr)":45.08,"MLSUMClusteringS2S (fr)":34.75,"MasakhaNEWSClusteringP2P (fra)":53.18,"MasakhaNEWSClusteringS2S (fra)":32.31} +{"Rank":15,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.6,"AlloProfClusteringP2P":61.82,"AlloProfClusteringS2S":39.78,"HALClusteringS2S":18.73,"MLSUMClusteringP2P (fr)":42.07,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":58.6,"MasakhaNEWSClusteringS2S (fra)":31.33} +{"Rank":16,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":40.43,"AlloProfClusteringP2P":60.66,"AlloProfClusteringS2S":35.05,"HALClusteringS2S":20.9,"MLSUMClusteringP2P (fr)":43.5,"MLSUMClusteringS2S (fr)":30.99,"MasakhaNEWSClusteringP2P (fra)":49.71,"MasakhaNEWSClusteringS2S (fra)":42.23} +{"Rank":17,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.07,"AlloProfClusteringP2P":58.44,"AlloProfClusteringS2S":35.93,"HALClusteringS2S":17.72,"MLSUMClusteringP2P (fr)":40.77,"MLSUMClusteringS2S (fr)":30.06,"MasakhaNEWSClusteringP2P (fra)":61.9,"MasakhaNEWSClusteringS2S (fra)":35.64} +{"Rank":18,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.11,"AlloProfClusteringP2P":54.49,"AlloProfClusteringS2S":44.79,"HALClusteringS2S":23.97,"MLSUMClusteringP2P (fr)":40.55,"MLSUMClusteringS2S (fr)":37.53,"MasakhaNEWSClusteringP2P (fra)":41.57,"MasakhaNEWSClusteringS2S (fra)":30.88} +{"Rank":19,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.93,"AlloProfClusteringP2P":60.89,"AlloProfClusteringS2S":32.52,"HALClusteringS2S":18.95,"MLSUMClusteringP2P (fr)":43.2,"MLSUMClusteringS2S (fr)":37.61,"MasakhaNEWSClusteringP2P (fra)":40.12,"MasakhaNEWSClusteringS2S (fra)":39.22} 
+{"Rank":20,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.91,"AlloProfClusteringP2P":61.96,"AlloProfClusteringS2S":31.36,"HALClusteringS2S":17.31,"MLSUMClusteringP2P (fr)":42.8,"MLSUMClusteringS2S (fr)":32.72,"MasakhaNEWSClusteringP2P (fra)":56.81,"MasakhaNEWSClusteringS2S (fra)":29.41} +{"Rank":21,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":38.7,"AlloProfClusteringP2P":62.99,"AlloProfClusteringS2S":32.26,"HALClusteringS2S":22.44,"MLSUMClusteringP2P (fr)":44.04,"MLSUMClusteringS2S (fr)":37.65,"MasakhaNEWSClusteringP2P (fra)":40.94,"MasakhaNEWSClusteringS2S (fra)":30.56} +{"Rank":22,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":38.7,"AlloProfClusteringP2P":55.95,"AlloProfClusteringS2S":35.39,"HALClusteringS2S":18.2,"MLSUMClusteringP2P (fr)":40.17,"MLSUMClusteringS2S (fr)":34.65,"MasakhaNEWSClusteringP2P (fra)":53.76,"MasakhaNEWSClusteringS2S (fra)":32.76} +{"Rank":23,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.35,"AlloProfClusteringP2P":56.06,"AlloProfClusteringS2S":42.16,"HALClusteringS2S":23.21,"MLSUMClusteringP2P (fr)":39.97,"MLSUMClusteringS2S (fr)":36.55,"MasakhaNEWSClusteringP2P (fra)":36.58,"MasakhaNEWSClusteringS2S (fra)":33.9} +{"Rank":24,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":38.32,"AlloProfClusteringP2P":54.78,"AlloProfClusteringS2S":31.6,"HALClusteringS2S":20.62,"MLSUMClusteringP2P (fr)":42.09,"MLSUMClusteringS2S (fr)":34.84,"MasakhaNEWSClusteringP2P (fra)":46.16,"MasakhaNEWSClusteringS2S (fra)":38.13} +{"Rank":25,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.35,"AlloProfClusteringP2P":54.21,"AlloProfClusteringS2S":37.95,"HALClusteringS2S":18.94,"MLSUMClusteringP2P (fr)":41.02,"MLSUMClusteringS2S (fr)":37.97,"MasakhaNEWSClusteringP2P (fra)":24.09,"MasakhaNEWSClusteringS2S (fra)":40.24} +{"Rank":26,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":35.44,"AlloProfClusteringP2P":59.09,"AlloProfClusteringS2S":38.92,"HALClusteringS2S":20.22,"MLSUMClusteringP2P (fr)":35.98,"MLSUMClusteringS2S (fr)":27.05,"MasakhaNEWSClusteringP2P (fra)":36.03,"MasakhaNEWSClusteringS2S (fra)":30.77} +{"Rank":27,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":35.04,"AlloProfClusteringP2P":62.22,"AlloProfClusteringS2S":27.06,"HALClusteringS2S":13.86,"MLSUMClusteringP2P (fr)":44.11,"MLSUMClusteringS2S (fr)":30.47,"MasakhaNEWSClusteringP2P (fra)":40.2,"MasakhaNEWSClusteringS2S (fra)":27.35} +{"Rank":28,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":34.73,"AlloProfClusteringP2P":51.83,"AlloProfClusteringS2S":32.07,"HALClusteringS2S":18.84,"MLSUMClusteringP2P (fr)":36.74,"MLSUMClusteringS2S (fr)":28.12,"MasakhaNEWSClusteringP2P (fra)":34.92,"MasakhaNEWSClusteringS2S (fra)":40.58} +{"Rank":29,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":34.09,"AlloProfClusteringP2P":53.16,"AlloProfClusteringS2S":43.43,"HALClusteringS2S":20.26,"MLSUMClusteringP2P 
(fr)":41.22,"MLSUMClusteringS2S (fr)":31.88,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":30,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.96,"AlloProfClusteringP2P":53.49,"AlloProfClusteringS2S":43.1,"HALClusteringS2S":19.78,"MLSUMClusteringP2P (fr)":40.73,"MLSUMClusteringS2S (fr)":31.94,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":31,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":33.94,"AlloProfClusteringP2P":53.22,"AlloProfClusteringS2S":42.92,"HALClusteringS2S":19.94,"MLSUMClusteringP2P (fr)":40.96,"MLSUMClusteringS2S (fr)":31.87,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":33.82,"AlloProfClusteringP2P":51.5,"AlloProfClusteringS2S":43.06,"HALClusteringS2S":20.81,"MLSUMClusteringP2P (fr)":40.9,"MLSUMClusteringS2S (fr)":31.8,"MasakhaNEWSClusteringP2P (fra)":24.23,"MasakhaNEWSClusteringS2S (fra)":24.46} +{"Rank":33,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":33.75,"AlloProfClusteringP2P":46.03,"AlloProfClusteringS2S":31.83,"HALClusteringS2S":19.58,"MLSUMClusteringP2P (fr)":34.35,"MLSUMClusteringS2S (fr)":29.3,"MasakhaNEWSClusteringP2P (fra)":42.72,"MasakhaNEWSClusteringS2S (fra)":32.47} +{"Rank":34,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":33.66,"AlloProfClusteringP2P":49.11,"AlloProfClusteringS2S":32.72,"HALClusteringS2S":16.19,"MLSUMClusteringP2P (fr)":36.19,"MLSUMClusteringS2S (fr)":30.39,"MasakhaNEWSClusteringP2P (fra)":38.51,"MasakhaNEWSClusteringS2S (fra)":32.51} +{"Rank":35,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":32.84,"AlloProfClusteringP2P":49.13,"AlloProfClusteringS2S":26.16,"HALClusteringS2S":12.49,"MLSUMClusteringP2P (fr)":35.15,"MLSUMClusteringS2S (fr)":25.95,"MasakhaNEWSClusteringP2P (fra)":53.73,"MasakhaNEWSClusteringS2S (fra)":27.27} +{"Rank":36,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":29.92,"AlloProfClusteringP2P":48.45,"AlloProfClusteringS2S":25.81,"HALClusteringS2S":11.52,"MLSUMClusteringP2P (fr)":34.53,"MLSUMClusteringS2S (fr)":27.35,"MasakhaNEWSClusteringP2P (fra)":32.04,"MasakhaNEWSClusteringS2S (fra)":29.77} +{"Rank":37,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.03,"AlloProfClusteringP2P":53.57,"AlloProfClusteringS2S":22.13,"HALClusteringS2S":7.68,"MLSUMClusteringP2P (fr)":36.43,"MLSUMClusteringS2S (fr)":25.26,"MasakhaNEWSClusteringP2P (fra)":37.57,"MasakhaNEWSClusteringS2S (fra)":20.58} +{"Rank":38,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":28.62,"AlloProfClusteringP2P":56.54,"AlloProfClusteringS2S":21.18,"HALClusteringS2S":5.94,"MLSUMClusteringP2P (fr)":42.67,"MLSUMClusteringS2S (fr)":18.5,"MasakhaNEWSClusteringP2P (fra)":34.02,"MasakhaNEWSClusteringS2S (fra)":21.52} +{"Rank":39,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, 
fp32)":1.04,"Average":28.42,"AlloProfClusteringP2P":52.24,"AlloProfClusteringS2S":20.37,"HALClusteringS2S":8.68,"MLSUMClusteringP2P (fr)":40.44,"MLSUMClusteringS2S (fr)":24.14,"MasakhaNEWSClusteringP2P (fra)":29.29,"MasakhaNEWSClusteringS2S (fra)":23.76} +{"Rank":40,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":27.18,"AlloProfClusteringP2P":52.86,"AlloProfClusteringS2S":14.46,"HALClusteringS2S":3.85,"MLSUMClusteringP2P (fr)":39.06,"MLSUMClusteringS2S (fr)":17.13,"MasakhaNEWSClusteringP2P (fra)":41.61,"MasakhaNEWSClusteringS2S (fra)":21.26} +{"Rank":41,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":25.11,"AlloProfClusteringP2P":40.85,"AlloProfClusteringS2S":21.76,"HALClusteringS2S":5.26,"MLSUMClusteringP2P (fr)":38.09,"MLSUMClusteringS2S (fr)":18.71,"MasakhaNEWSClusteringP2P (fra)":26.43,"MasakhaNEWSClusteringS2S (fra)":24.68} +{"Rank":42,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":22.45,"AlloProfClusteringP2P":43.2,"AlloProfClusteringS2S":12.94,"HALClusteringS2S":1.8,"MLSUMClusteringP2P (fr)":33.22,"MLSUMClusteringS2S (fr)":14.9,"MasakhaNEWSClusteringP2P (fra)":28.49,"MasakhaNEWSClusteringS2S (fra)":22.58} +{"Rank":43,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":44,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":45,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":46,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":47,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":48,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":49,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, 
fp32)":0.46,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloProfClusteringP2P":null,"AlloProfClusteringS2S":null,"HALClusteringS2S":null,"MLSUMClusteringP2P (fr)":null,"MLSUMClusteringS2S (fr)":null,"MasakhaNEWSClusteringP2P (fra)":null,"MasakhaNEWSClusteringS2S (fra)":null} diff --git a/boards_data/fr/data_tasks/PairClassification/default.jsonl b/boards_data/fr/data_tasks/PairClassification/default.jsonl index 57688fcf2dc34dbf2c613bcc78c9f6dc7525109f..69246aa27835d4fc3a608c7f2c5bf7d956820db8 100644 --- a/boards_data/fr/data_tasks/PairClassification/default.jsonl +++ b/boards_data/fr/data_tasks/PairClassification/default.jsonl @@ -1,61 +1,57 @@ -{"level_0":0,"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, 
fp32)":28.36,"Average":90.43,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":80.86} -{"level_0":1,"index":34,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.43,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":80.86} -{"level_0":2,"index":56,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.43,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":80.86} -{"level_0":3,"index":55,"Rank":4,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.88,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":73.77} -{"level_0":4,"index":8,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.88,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":73.77} -{"level_0":5,"index":10,"Rank":6,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.07,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":70.14} -{"level_0":6,"index":5,"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":81.65,"OpusparcusPC (fr)":93.77,"PawsXPairClassification (fr)":69.53} -{"level_0":7,"index":7,"Rank":8,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.54,"OpusparcusPC (fr)":100.0,"PawsXPairClassification (fr)":61.07} -{"level_0":8,"index":52,"Rank":9,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.23,"OpusparcusPC (fr)":93.48,"PawsXPairClassification (fr)":66.98} -{"level_0":9,"index":54,"Rank":10,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.15,"OpusparcusPC (fr)":93.37,"PawsXPairClassification (fr)":66.92} -{"level_0":10,"index":22,"Rank":11,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.64,"OpusparcusPC (fr)":94.77,"PawsXPairClassification (fr)":64.51} -{"level_0":11,"index":53,"Rank":12,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.57,"OpusparcusPC (fr)":93.72,"PawsXPairClassification (fr)":65.42} -{"level_0":12,"index":72,"Rank":13,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.96,"OpusparcusPC (fr)":93.94,"PawsXPairClassification (fr)":63.98} -{"level_0":13,"index":75,"Rank":14,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.81,"OpusparcusPC (fr)":92.04,"PawsXPairClassification (fr)":65.57} -{"level_0":14,"index":4,"Rank":15,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.66,"OpusparcusPC (fr)":93.68,"PawsXPairClassification (fr)":63.64} -{"level_0":15,"index":23,"Rank":16,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.3,"OpusparcusPC (fr)":93.15,"PawsXPairClassification (fr)":63.44} -{"level_0":16,"index":15,"Rank":17,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":77.67,"OpusparcusPC (fr)":94.08,"PawsXPairClassification (fr)":61.26} -{"level_0":17,"index":71,"Rank":18,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.5,"OpusparcusPC (fr)":92.48,"PawsXPairClassification (fr)":62.52} -{"level_0":18,"index":0,"Rank":19,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.32,"OpusparcusPC (fr)":92.61,"PawsXPairClassification (fr)":62.02} -{"level_0":19,"index":3,"Rank":20,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.3,"OpusparcusPC (fr)":93.06,"PawsXPairClassification (fr)":61.54} -{"level_0":20,"index":30,"Rank":21,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":77.28,"OpusparcusPC (fr)":91.42,"PawsXPairClassification (fr)":63.13} -{"level_0":21,"index":81,"Rank":22,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.14,"OpusparcusPC (fr)":94.12,"PawsXPairClassification (fr)":60.16} -{"level_0":22,"index":36,"Rank":23,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":77.11,"OpusparcusPC (fr)":94.63,"PawsXPairClassification (fr)":59.59} -{"level_0":23,"index":29,"Rank":24,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.1,"OpusparcusPC (fr)":94.02,"PawsXPairClassification (fr)":60.19} -{"level_0":24,"index":50,"Rank":25,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.1,"OpusparcusPC (fr)":93.34,"PawsXPairClassification (fr)":60.85} -{"level_0":25,"index":21,"Rank":26,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.01,"OpusparcusPC (fr)":93.64,"PawsXPairClassification (fr)":60.38} -{"level_0":26,"index":80,"Rank":27,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.97,"OpusparcusPC (fr)":93.18,"PawsXPairClassification (fr)":60.76} -{"level_0":27,"index":2,"Rank":28,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.85,"OpusparcusPC (fr)":92.87,"PawsXPairClassification (fr)":60.83} -{"level_0":28,"index":45,"Rank":29,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":76.19,"OpusparcusPC (fr)":93.89,"PawsXPairClassification (fr)":58.5} -{"level_0":29,"index":43,"Rank":30,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":76.08,"OpusparcusPC (fr)":88.5,"PawsXPairClassification (fr)":63.65} -{"level_0":30,"index":68,"Rank":31,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":75.8,"OpusparcusPC (fr)":93.45,"PawsXPairClassification (fr)":58.14} -{"level_0":31,"index":24,"Rank":32,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.61,"OpusparcusPC (fr)":92.6,"PawsXPairClassification (fr)":58.63} -{"level_0":32,"index":70,"Rank":33,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"Average":75.39,"OpusparcusPC (fr)":91.19,"PawsXPairClassification (fr)":59.59} -{"level_0":33,"index":44,"Rank":34,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.82,"OpusparcusPC (fr)":92.72,"PawsXPairClassification (fr)":56.93} -{"level_0":34,"index":35,"Rank":35,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.74,"OpusparcusPC (fr)":92.05,"PawsXPairClassification (fr)":57.44} -{"level_0":35,"index":67,"Rank":36,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.47,"OpusparcusPC (fr)":92.01,"PawsXPairClassification (fr)":56.94} -{"level_0":36,"index":1,"Rank":37,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.36,"OpusparcusPC (fr)":89.76,"PawsXPairClassification (fr)":58.96} -{"level_0":37,"index":57,"Rank":38,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":74.3,"OpusparcusPC (fr)":93.96,"PawsXPairClassification (fr)":54.63} -{"level_0":38,"index":14,"Rank":39,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"OpusparcusPC (fr)":90.92,"PawsXPairClassification (fr)":57.32} -{"level_0":39,"index":47,"Rank":40,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.1,"OpusparcusPC (fr)":92.52,"PawsXPairClassification (fr)":55.68} -{"level_0":40,"index":48,"Rank":41,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.76,"OpusparcusPC (fr)":85.54,"PawsXPairClassification (fr)":61.99} -{"level_0":41,"index":77,"Rank":42,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.5,"OpusparcusPC (fr)":93.38,"PawsXPairClassification (fr)":53.62} -{"level_0":42,"index":66,"Rank":43,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":72.72,"OpusparcusPC (fr)":88.07,"PawsXPairClassification (fr)":57.36} -{"level_0":43,"index":69,"Rank":44,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.38,"OpusparcusPC (fr)":89.4,"PawsXPairClassification (fr)":55.35} -{"level_0":44,"index":76,"Rank":45,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.92,"OpusparcusPC (fr)":91.46,"PawsXPairClassification (fr)":52.39} -{"level_0":45,"index":63,"Rank":46,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":71.57,"OpusparcusPC (fr)":92.07,"PawsXPairClassification (fr)":51.08} -{"level_0":46,"index":59,"Rank":47,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":70.96,"OpusparcusPC (fr)":86.53,"PawsXPairClassification (fr)":55.4} -{"level_0":47,"index":49,"Rank":48,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.9,"OpusparcusPC (fr)":82.1,"PawsXPairClassification (fr)":59.69} -{"level_0":48,"index":42,"Rank":49,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million 
Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":70.32,"OpusparcusPC (fr)":87.43,"PawsXPairClassification (fr)":53.22} -{"level_0":49,"index":16,"Rank":50,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.1,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.4} -{"level_0":50,"index":18,"Rank":51,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.09,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.39} -{"level_0":51,"index":17,"Rank":52,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.08,"OpusparcusPC (fr)":86.78,"PawsXPairClassification (fr)":53.38} -{"level_0":52,"index":41,"Rank":53,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":70.08,"OpusparcusPC (fr)":86.77,"PawsXPairClassification (fr)":53.39} -{"level_0":53,"index":79,"Rank":54,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":68.56,"OpusparcusPC (fr)":83.73,"PawsXPairClassification (fr)":53.38} -{"level_0":54,"index":78,"Rank":55,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":68.4,"OpusparcusPC (fr)":85.45,"PawsXPairClassification (fr)":51.35} -{"level_0":55,"index":39,"Rank":56,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.39,"OpusparcusPC (fr)":82.0,"PawsXPairClassification (fr)":52.78} -{"level_0":56,"index":38,"Rank":57,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.02,"OpusparcusPC (fr)":82.15,"PawsXPairClassification (fr)":51.89} -{"level_0":57,"index":40,"Rank":58,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":64.46,"OpusparcusPC (fr)":74.78,"PawsXPairClassification (fr)":54.14} -{"level_0":58,"index":37,"Rank":72,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","OpusparcusPC (fr)":"","PawsXPairClassification (fr)":71.36} -{"level_0":59,"index":51,"Rank":74,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","OpusparcusPC (fr)":"","PawsXPairClassification (fr)":66.96} -{"level_0":60,"index":82,"Rank":83,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","OpusparcusPC (fr)":94.45,"PawsXPairClassification (fr)":""} +{"Rank":1,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":81.65,"OpusparcusPC (fr)":93.77,"PawsXPairClassification (fr)":69.53} +{"Rank":2,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.96,"OpusparcusPC (fr)":93.94,"PawsXPairClassification (fr)":63.98} +{"Rank":3,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.81,"OpusparcusPC (fr)":92.04,"PawsXPairClassification (fr)":65.57} +{"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.66,"OpusparcusPC (fr)":93.68,"PawsXPairClassification (fr)":63.64} 
+{"Rank":5,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.67,"OpusparcusPC (fr)":94.08,"PawsXPairClassification (fr)":61.26} +{"Rank":6,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.5,"OpusparcusPC (fr)":92.48,"PawsXPairClassification (fr)":62.52} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.32,"OpusparcusPC (fr)":92.61,"PawsXPairClassification (fr)":62.02} +{"Rank":8,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.3,"OpusparcusPC (fr)":93.06,"PawsXPairClassification (fr)":61.54} +{"Rank":9,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":77.28,"OpusparcusPC (fr)":91.42,"PawsXPairClassification (fr)":63.13} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.14,"OpusparcusPC (fr)":94.12,"PawsXPairClassification (fr)":60.16} +{"Rank":11,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":77.11,"OpusparcusPC (fr)":94.63,"PawsXPairClassification (fr)":59.59} +{"Rank":12,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.85,"OpusparcusPC (fr)":92.87,"PawsXPairClassification (fr)":60.83} +{"Rank":13,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":76.19,"OpusparcusPC (fr)":93.89,"PawsXPairClassification (fr)":58.5} +{"Rank":14,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":76.08,"OpusparcusPC (fr)":88.5,"PawsXPairClassification (fr)":63.65} +{"Rank":15,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":75.8,"OpusparcusPC (fr)":93.45,"PawsXPairClassification (fr)":58.14} +{"Rank":16,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":75.39,"OpusparcusPC (fr)":91.19,"PawsXPairClassification (fr)":59.59} +{"Rank":17,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.82,"OpusparcusPC (fr)":92.72,"PawsXPairClassification (fr)":56.93} +{"Rank":18,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.74,"OpusparcusPC (fr)":92.05,"PawsXPairClassification (fr)":57.44} +{"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.47,"OpusparcusPC (fr)":92.01,"PawsXPairClassification (fr)":56.94} +{"Rank":20,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.36,"OpusparcusPC (fr)":89.76,"PawsXPairClassification (fr)":58.96} +{"Rank":21,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":74.3,"OpusparcusPC (fr)":93.96,"PawsXPairClassification (fr)":54.63} +{"Rank":22,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.12,"OpusparcusPC (fr)":90.92,"PawsXPairClassification (fr)":57.32} 
+{"Rank":23,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.1,"OpusparcusPC (fr)":92.52,"PawsXPairClassification (fr)":55.68} +{"Rank":24,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.76,"OpusparcusPC (fr)":85.54,"PawsXPairClassification (fr)":61.99} +{"Rank":25,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.5,"OpusparcusPC (fr)":93.38,"PawsXPairClassification (fr)":53.62} +{"Rank":26,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":72.72,"OpusparcusPC (fr)":88.07,"PawsXPairClassification (fr)":57.36} +{"Rank":27,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":72.38,"OpusparcusPC (fr)":89.4,"PawsXPairClassification (fr)":55.35} +{"Rank":28,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.92,"OpusparcusPC (fr)":91.46,"PawsXPairClassification (fr)":52.39} +{"Rank":29,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":71.57,"OpusparcusPC (fr)":92.07,"PawsXPairClassification (fr)":51.08} +{"Rank":30,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":70.96,"OpusparcusPC (fr)":86.53,"PawsXPairClassification (fr)":55.4} +{"Rank":31,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.9,"OpusparcusPC (fr)":82.1,"PawsXPairClassification (fr)":59.69} +{"Rank":32,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":70.32,"OpusparcusPC (fr)":87.43,"PawsXPairClassification (fr)":53.22} +{"Rank":33,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.1,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.4} +{"Rank":34,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.09,"OpusparcusPC (fr)":86.79,"PawsXPairClassification (fr)":53.39} +{"Rank":35,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":70.08,"OpusparcusPC (fr)":86.78,"PawsXPairClassification (fr)":53.38} +{"Rank":36,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":70.08,"OpusparcusPC (fr)":86.77,"PawsXPairClassification (fr)":53.39} +{"Rank":37,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":68.56,"OpusparcusPC (fr)":83.73,"PawsXPairClassification (fr)":53.38} +{"Rank":38,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":68.4,"OpusparcusPC (fr)":85.45,"PawsXPairClassification (fr)":51.35} +{"Rank":39,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.39,"OpusparcusPC (fr)":82.0,"PawsXPairClassification (fr)":52.78} +{"Rank":40,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":67.02,"OpusparcusPC 
(fr)":82.15,"PawsXPairClassification (fr)":51.89} +{"Rank":41,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":64.46,"OpusparcusPC (fr)":74.78,"PawsXPairClassification (fr)":54.14} +{"Rank":42,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":43,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":44,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":45,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":46,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":47,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":48,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":49,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":50,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":51,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":52,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":53,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":54,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":55,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":56,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"OpusparcusPC (fr)":null,"PawsXPairClassification (fr)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"OpusparcusPC (fr)":94.45,"PawsXPairClassification (fr)":null} diff --git a/boards_data/fr/data_tasks/Reranking/default.jsonl b/boards_data/fr/data_tasks/Reranking/default.jsonl index b87d1b3bd61e768ae36a264b6b4069e23ce93bd5..80ffbd841925016e1c5659a97c6e96210c32ad7a 100644 --- a/boards_data/fr/data_tasks/Reranking/default.jsonl 
+++ b/boards_data/fr/data_tasks/Reranking/default.jsonl @@ -1,58 +1,57 @@ -{"level_0":0,"index":10,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.22,"AlloprofReranking":78.62,"SyntecReranking":91.83} -{"level_0":1,"index":55,"Rank":2,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.76,"AlloprofReranking":73.49,"SyntecReranking":94.03} -{"level_0":2,"index":8,"Rank":3,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.76,"AlloprofReranking":73.49,"SyntecReranking":94.03} -{"level_0":3,"index":4,"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.59,"AlloprofReranking":74.78,"SyntecReranking":90.4} -{"level_0":4,"index":3,"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.06,"AlloprofReranking":72.92,"SyntecReranking":91.2} -{"level_0":5,"index":0,"Rank":6,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.46,"AlloprofReranking":72.36,"SyntecReranking":88.57} -{"level_0":6,"index":23,"Rank":7,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.35,"AlloprofReranking":73.1,"SyntecReranking":87.6} -{"level_0":7,"index":22,"Rank":8,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.79,"AlloprofReranking":73.63,"SyntecReranking":85.95} -{"level_0":8,"index":2,"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.78,"AlloprofReranking":70.79,"SyntecReranking":86.77} -{"level_0":9,"index":56,"Rank":10,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.7,"AlloprofReranking":73.08,"SyntecReranking":84.32} -{"level_0":10,"index":34,"Rank":11,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.7,"AlloprofReranking":73.08,"SyntecReranking":84.32} -{"level_0":11,"index":9,"Rank":12,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":78.7,"AlloprofReranking":73.08,"SyntecReranking":84.32} -{"level_0":12,"index":21,"Rank":13,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.14,"AlloprofReranking":70.46,"SyntecReranking":83.83} -{"level_0":13,"index":72,"Rank":14,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":76.76,"AlloprofReranking":68.36,"SyntecReranking":85.15} -{"level_0":14,"index":29,"Rank":15,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.58,"AlloprofReranking":64.88,"SyntecReranking":88.28} -{"level_0":15,"index":54,"Rank":16,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.43,"AlloprofReranking":68.79,"SyntecReranking":84.07} -{"level_0":16,"index":24,"Rank":17,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":76.0,"AlloprofReranking":69.5,"SyntecReranking":82.5} -{"level_0":17,"index":80,"Rank":18,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.75,"AlloprofReranking":68.73,"SyntecReranking":82.77} -{"level_0":18,"index":53,"Rank":19,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.48,"AlloprofReranking":68.31,"SyntecReranking":82.65} -{"level_0":19,"index":50,"Rank":20,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.3,"AlloprofReranking":65.17,"SyntecReranking":85.43} -{"level_0":20,"index":52,"Rank":21,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.2,"AlloprofReranking":67.24,"SyntecReranking":83.17} -{"level_0":21,"index":71,"Rank":22,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":73.18,"AlloprofReranking":63.3,"SyntecReranking":83.07} -{"level_0":22,"index":1,"Rank":23,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.1,"AlloprofReranking":63.54,"SyntecReranking":82.65} -{"level_0":23,"index":36,"Rank":24,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":72.89,"AlloprofReranking":57.62,"SyntecReranking":88.15} -{"level_0":24,"index":45,"Rank":25,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":72.14,"AlloprofReranking":57.37,"SyntecReranking":86.9} -{"level_0":25,"index":44,"Rank":26,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":71.76,"AlloprofReranking":58.1,"SyntecReranking":85.43} -{"level_0":26,"index":47,"Rank":27,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":71.44,"AlloprofReranking":56.17,"SyntecReranking":86.7} -{"level_0":27,"index":14,"Rank":28,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.82,"AlloprofReranking":51.6,"SyntecReranking":88.03} -{"level_0":28,"index":70,"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":68.88,"AlloprofReranking":57.99,"SyntecReranking":79.77} -{"level_0":29,"index":68,"Rank":30,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.78,"AlloprofReranking":54.34,"SyntecReranking":83.23} -{"level_0":30,"index":15,"Rank":31,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.36,"AlloprofReranking":51.01,"SyntecReranking":85.72} -{"level_0":31,"index":30,"Rank":32,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":67.95,"AlloprofReranking":53.0,"SyntecReranking":82.9} -{"level_0":32,"index":77,"Rank":33,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.26,"AlloprofReranking":55.39,"SyntecReranking":77.13} -{"level_0":33,"index":76,"Rank":34,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.04,"AlloprofReranking":56.23,"SyntecReranking":73.85} -{"level_0":34,"index":35,"Rank":35,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.22,"AlloprofReranking":48.68,"SyntecReranking":79.75} -{"level_0":35,"index":69,"Rank":36,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":64.08,"AlloprofReranking":50.12,"SyntecReranking":78.05} -{"level_0":36,"index":63,"Rank":37,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":63.28,"AlloprofReranking":51.77,"SyntecReranking":74.78} -{"level_0":37,"index":43,"Rank":38,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":62.2,"AlloprofReranking":47.36,"SyntecReranking":77.05} -{"level_0":38,"index":67,"Rank":39,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":62.02,"AlloprofReranking":49.01,"SyntecReranking":75.03} -{"level_0":39,"index":57,"Rank":40,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":61.4,"AlloprofReranking":49.51,"SyntecReranking":73.28} -{"level_0":40,"index":75,"Rank":41,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.88,"AlloprofReranking":51.48,"SyntecReranking":70.28} -{"level_0":41,"index":66,"Rank":42,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":52.68,"AlloprofReranking":40.28,"SyntecReranking":65.08} -{"level_0":42,"index":42,"Rank":43,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":52.62,"AlloprofReranking":38.85,"SyntecReranking":66.4} -{"level_0":43,"index":48,"Rank":44,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.86,"AlloprofReranking":39.13,"SyntecReranking":62.58} -{"level_0":44,"index":7,"Rank":45,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.98,"AlloprofReranking":37.09,"SyntecReranking":62.87} -{"level_0":45,"index":39,"Rank":46,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":45.86,"AlloprofReranking":34.55,"SyntecReranking":57.18} -{"level_0":46,"index":59,"Rank":47,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":45.63,"AlloprofReranking":31.69,"SyntecReranking":59.57} -{"level_0":47,"index":5,"Rank":48,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":45.61,"AlloprofReranking":35.29,"SyntecReranking":55.93} -{"level_0":48,"index":38,"Rank":49,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":45.34,"AlloprofReranking":34.81,"SyntecReranking":55.88} -{"level_0":49,"index":18,"Rank":50,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":44.75,"AlloprofReranking":36.25,"SyntecReranking":53.25} -{"level_0":50,"index":41,"Rank":51,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million 
Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":44.74,"AlloprofReranking":36.23,"SyntecReranking":53.25} -{"level_0":51,"index":17,"Rank":52,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":44.73,"AlloprofReranking":36.21,"SyntecReranking":53.25} -{"level_0":52,"index":16,"Rank":53,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":44.73,"AlloprofReranking":36.21,"SyntecReranking":53.25} -{"level_0":53,"index":49,"Rank":54,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":39.82,"AlloprofReranking":28.75,"SyntecReranking":50.88} -{"level_0":54,"index":79,"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":39.01,"AlloprofReranking":28.62,"SyntecReranking":49.4} -{"level_0":55,"index":78,"Rank":56,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":34.66,"AlloprofReranking":25.58,"SyntecReranking":43.75} -{"level_0":56,"index":40,"Rank":57,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":34.54,"AlloprofReranking":26.29,"SyntecReranking":42.8} -{"level_0":57,"index":81,"Rank":82,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloprofReranking":"","SyntecReranking":89.87} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.79,"AlloprofReranking":57.37,"AlloprofReranking (fra-Latn)":69.44,"SyntecReranking":86.9,"SyntecReranking (fra-Latn)":85.45} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":73.68,"AlloprofReranking":58.1,"AlloprofReranking (fra-Latn)":65.9,"SyntecReranking":85.43,"SyntecReranking (fra-Latn)":85.31} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":72.12,"AlloprofReranking":56.17,"AlloprofReranking (fra-Latn)":64.41,"SyntecReranking":86.7,"SyntecReranking (fra-Latn)":81.22} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":71.44,"AlloprofReranking":54.34,"AlloprofReranking (fra-Latn)":67.2,"SyntecReranking":83.23,"SyntecReranking (fra-Latn)":80.97} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":64.74,"AlloprofReranking":49.01,"AlloprofReranking (fra-Latn)":62.42,"SyntecReranking":75.03,"SyntecReranking (fra-Latn)":72.5} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":61.44,"AlloprofReranking":49.51,"AlloprofReranking (fra-Latn)":55.37,"SyntecReranking":73.28,"SyntecReranking (fra-Latn)":67.62} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":55.3,"AlloprofReranking":31.69,"AlloprofReranking (fra-Latn)":62.62,"SyntecReranking":59.57,"SyntecReranking (fra-Latn)":67.31} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":72.36,"AlloprofReranking 
(fra-Latn)":null,"SyntecReranking":88.57,"SyntecReranking (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":63.54,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":82.65,"SyntecReranking (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":70.79,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":86.77,"SyntecReranking (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":72.92,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":91.2,"SyntecReranking (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":74.78,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":90.4,"SyntecReranking (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AlloprofReranking":35.29,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":55.93,"SyntecReranking (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.6,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.03,"SyntecReranking (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.01,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":85.72,"SyntecReranking (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.21,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.21,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":36.25,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloprofReranking":null,"AlloprofReranking 
(fra-Latn)":77.95,"SyntecReranking":null,"SyntecReranking (fra-Latn)":83.32} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AlloprofReranking":53.0,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":82.9,"SyntecReranking (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":48.68,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":79.75,"SyntecReranking (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AlloprofReranking":57.62,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":88.15,"SyntecReranking (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":34.81,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":55.88,"SyntecReranking (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofReranking":34.55,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":57.18,"SyntecReranking (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AlloprofReranking":26.29,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":42.8,"SyntecReranking (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofReranking":36.23,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":53.25,"SyntecReranking (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":38.85,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":66.4,"SyntecReranking (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AlloprofReranking":47.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":77.05,"SyntecReranking (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":39.13,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":62.58,"SyntecReranking (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":28.75,"AlloprofReranking 
(fra-Latn)":null,"SyntecReranking":50.88,"SyntecReranking (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":67.01,"SyntecReranking":null,"SyntecReranking (fra-Latn)":69.17} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":69.63,"SyntecReranking":null,"SyntecReranking (fra-Latn)":66.12} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofReranking":51.77,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":74.78,"SyntecReranking (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AlloprofReranking":40.28,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":65.08,"SyntecReranking (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofReranking":50.12,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":78.05,"SyntecReranking (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofReranking":57.99,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":79.77,"SyntecReranking (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofReranking":63.3,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":83.07,"SyntecReranking (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AlloprofReranking":68.36,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":85.15,"SyntecReranking (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking 
(fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":51.48,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":70.28,"SyntecReranking (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":56.23,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":73.85,"SyntecReranking (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":55.39,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":77.13,"SyntecReranking (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AlloprofReranking":25.58,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":43.75,"SyntecReranking (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AlloprofReranking":28.62,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":49.4,"SyntecReranking (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":89.87,"SyntecReranking (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofReranking":null,"AlloprofReranking (fra-Latn)":null,"SyntecReranking":null,"SyntecReranking (fra-Latn)":null} diff --git a/boards_data/fr/data_tasks/Retrieval/default.jsonl b/boards_data/fr/data_tasks/Retrieval/default.jsonl index 3cc4c57fb1efa970130c92c77bc357179e46ea7a..a86bbed29eb95485a96711fba65f9b494475e2d3 100644 --- a/boards_data/fr/data_tasks/Retrieval/default.jsonl +++ b/boards_data/fr/data_tasks/Retrieval/default.jsonl @@ -1,60 +1,57 @@ -{"level_0":0,"index":9,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":55.65,"AlloprofRetrieval":58.88,"BSARDRetrieval":18.8,"MintakaRetrieval (fr)":54.03,"SyntecRetrieval":78.25,"XPQARetrieval (fr)":68.3} -{"level_0":1,"index":34,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.65,"AlloprofRetrieval":58.88,"BSARDRetrieval":18.8,"MintakaRetrieval (fr)":54.03,"SyntecRetrieval":78.25,"XPQARetrieval (fr)":68.3} -{"level_0":2,"index":56,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.65,"AlloprofRetrieval":58.88,"BSARDRetrieval":18.8,"MintakaRetrieval (fr)":54.03,"SyntecRetrieval":78.25,"XPQARetrieval (fr)":68.3} -{"level_0":3,"index":4,"Rank":4,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.56,"AlloprofRetrieval":58.27,"BSARDRetrieval":5.14,"MintakaRetrieval (fr)":49.19,"SyntecRetrieval":87.28,"XPQARetrieval (fr)":72.92} -{"level_0":4,"index":3,"Rank":5,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.98,"AlloprofRetrieval":57.28,"BSARDRetrieval":11.83,"MintakaRetrieval 
(fr)":34.92,"SyntecRetrieval":87.33,"XPQARetrieval (fr)":73.56} -{"level_0":5,"index":8,"Rank":6,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.56,"AlloprofRetrieval":57.89,"BSARDRetrieval":6.3,"MintakaRetrieval (fr)":42.56,"SyntecRetrieval":90.47,"XPQARetrieval (fr)":65.58} -{"level_0":6,"index":55,"Rank":7,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.56,"AlloprofRetrieval":57.89,"BSARDRetrieval":6.3,"MintakaRetrieval (fr)":42.56,"SyntecRetrieval":90.47,"XPQARetrieval (fr)":65.58} -{"level_0":7,"index":22,"Rank":8,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.09,"AlloprofRetrieval":47.6,"BSARDRetrieval":19.58,"MintakaRetrieval (fr)":32.62,"SyntecRetrieval":84.2,"XPQARetrieval (fr)":66.43} -{"level_0":8,"index":81,"Rank":9,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.23,"AlloprofRetrieval":51.64,"BSARDRetrieval":0.61,"MintakaRetrieval (fr)":29.94,"SyntecRetrieval":85.97,"XPQARetrieval (fr)":73.0} -{"level_0":9,"index":0,"Rank":10,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.81,"AlloprofRetrieval":56.84,"BSARDRetrieval":2.48,"MintakaRetrieval (fr)":21.73,"SyntecRetrieval":78.77,"XPQARetrieval (fr)":74.24} -{"level_0":10,"index":29,"Rank":11,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.78,"AlloprofRetrieval":46.94,"BSARDRetrieval":2.08,"MintakaRetrieval (fr)":30.07,"SyntecRetrieval":84.6,"XPQARetrieval (fr)":70.22} -{"level_0":11,"index":23,"Rank":12,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.68,"AlloprofRetrieval":44.42,"BSARDRetrieval":12.01,"MintakaRetrieval (fr)":25.19,"SyntecRetrieval":82.86,"XPQARetrieval (fr)":68.91} -{"level_0":12,"index":2,"Rank":13,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.33,"AlloprofRetrieval":52.61,"BSARDRetrieval":0.29,"MintakaRetrieval (fr)":19.05,"SyntecRetrieval":82.77,"XPQARetrieval (fr)":71.95} -{"level_0":13,"index":21,"Rank":14,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.99,"AlloprofRetrieval":41.26,"BSARDRetrieval":12.22,"MintakaRetrieval (fr)":27.6,"SyntecRetrieval":79.68,"XPQARetrieval (fr)":64.21} -{"level_0":14,"index":53,"Rank":15,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.9,"AlloprofRetrieval":49.84,"BSARDRetrieval":0.22,"MintakaRetrieval (fr)":31.25,"SyntecRetrieval":76.63,"XPQARetrieval (fr)":66.55} -{"level_0":15,"index":54,"Rank":16,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":44.41,"AlloprofRetrieval":49.05,"BSARDRetrieval":0.16,"MintakaRetrieval (fr)":31.06,"SyntecRetrieval":76.96,"XPQARetrieval (fr)":64.8} -{"level_0":16,"index":52,"Rank":17,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.85,"AlloprofRetrieval":47.69,"BSARDRetrieval":1.75,"MintakaRetrieval (fr)":27.88,"SyntecRetrieval":77.67,"XPQARetrieval (fr)":64.27} 
-{"level_0":17,"index":72,"Rank":18,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":43.84,"AlloprofRetrieval":45.75,"BSARDRetrieval":3.33,"MintakaRetrieval (fr)":34.93,"SyntecRetrieval":78.97,"XPQARetrieval (fr)":56.2} -{"level_0":18,"index":50,"Rank":19,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.58,"AlloprofRetrieval":45.1,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":25.17,"SyntecRetrieval":79.14,"XPQARetrieval (fr)":68.49} -{"level_0":19,"index":80,"Rank":20,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.96,"AlloprofRetrieval":38.06,"BSARDRetrieval":11.04,"MintakaRetrieval (fr)":27.48,"SyntecRetrieval":78.2,"XPQARetrieval (fr)":60.02} -{"level_0":20,"index":24,"Rank":21,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.92,"AlloprofRetrieval":39.89,"BSARDRetrieval":8.41,"MintakaRetrieval (fr)":25.52,"SyntecRetrieval":77.57,"XPQARetrieval (fr)":63.2} -{"level_0":21,"index":45,"Rank":22,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":42.17,"AlloprofRetrieval":38.15,"BSARDRetrieval":0.27,"MintakaRetrieval (fr)":25.2,"SyntecRetrieval":81.07,"XPQARetrieval (fr)":66.15} -{"level_0":22,"index":44,"Rank":23,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":41.19,"AlloprofRetrieval":36.21,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":23.46,"SyntecRetrieval":80.49,"XPQARetrieval (fr)":65.81} -{"level_0":23,"index":51,"Rank":24,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":41.15,"AlloprofRetrieval":45.41,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":25.7,"SyntecRetrieval":75.75,"XPQARetrieval (fr)":58.88} -{"level_0":24,"index":1,"Rank":25,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.81,"AlloprofRetrieval":45.5,"BSARDRetrieval":0.15,"MintakaRetrieval (fr)":15.51,"SyntecRetrieval":75.83,"XPQARetrieval (fr)":67.07} -{"level_0":25,"index":15,"Rank":26,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":40.42,"AlloprofRetrieval":38.36,"BSARDRetrieval":0.14,"MintakaRetrieval (fr)":25.44,"SyntecRetrieval":79.27,"XPQARetrieval (fr)":58.87} -{"level_0":26,"index":36,"Rank":27,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":40.04,"AlloprofRetrieval":31.62,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":21.87,"SyntecRetrieval":81.11,"XPQARetrieval (fr)":65.62} -{"level_0":27,"index":71,"Rank":28,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":39.69,"AlloprofRetrieval":40.38,"BSARDRetrieval":0.14,"MintakaRetrieval (fr)":31.54,"SyntecRetrieval":74.24,"XPQARetrieval (fr)":52.14} -{"level_0":28,"index":76,"Rank":29,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.16,"AlloprofRetrieval":35.27,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":26.12,"SyntecRetrieval":69.82,"XPQARetrieval (fr)":59.59} -{"level_0":29,"index":77,"Rank":30,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.78,"AlloprofRetrieval":33.78,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":26.21,"SyntecRetrieval":63.69,"XPQARetrieval (fr)":65.21} -{"level_0":30,"index":30,"Rank":31,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":36.81,"AlloprofRetrieval":29.97,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":21.31,"SyntecRetrieval":74.2,"XPQARetrieval (fr)":58.57} -{"level_0":31,"index":47,"Rank":32,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":36.55,"AlloprofRetrieval":27.01,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":22.53,"SyntecRetrieval":75.76,"XPQARetrieval (fr)":57.47} -{"level_0":32,"index":14,"Rank":33,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.1,"AlloprofRetrieval":35.39,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":23.0,"SyntecRetrieval":76.88,"XPQARetrieval (fr)":45.23} -{"level_0":33,"index":70,"Rank":34,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":35.66,"AlloprofRetrieval":34.52,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":23.92,"SyntecRetrieval":71.05,"XPQARetrieval (fr)":48.79} -{"level_0":34,"index":68,"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":35.49,"AlloprofRetrieval":30.8,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":24.45,"SyntecRetrieval":76.0,"XPQARetrieval (fr)":46.22} -{"level_0":35,"index":63,"Rank":36,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":33.22,"AlloprofRetrieval":26.99,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":22.55,"SyntecRetrieval":65.34,"XPQARetrieval (fr)":51.2} -{"level_0":36,"index":35,"Rank":37,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.37,"AlloprofRetrieval":21.94,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":13.36,"SyntecRetrieval":68.62,"XPQARetrieval (fr)":57.92} -{"level_0":37,"index":69,"Rank":38,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":32.18,"AlloprofRetrieval":27.52,"BSARDRetrieval":0.16,"MintakaRetrieval (fr)":21.04,"SyntecRetrieval":67.0,"XPQARetrieval (fr)":45.19} -{"level_0":38,"index":67,"Rank":39,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.24,"AlloprofRetrieval":26.63,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":21.53,"SyntecRetrieval":65.54,"XPQARetrieval (fr)":42.51} -{"level_0":39,"index":66,"Rank":40,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":30.69,"AlloprofRetrieval":30.23,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":16.31,"SyntecRetrieval":58.07,"XPQARetrieval (fr)":48.83} -{"level_0":40,"index":59,"Rank":41,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.91,"AlloprofRetrieval":28.41,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":9.19,"SyntecRetrieval":60.15,"XPQARetrieval (fr)":51.79} -{"level_0":41,"index":57,"Rank":42,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Average":28.47,"AlloprofRetrieval":19.77,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":15.53,"SyntecRetrieval":55.31,"XPQARetrieval (fr)":51.74} -{"level_0":42,"index":75,"Rank":43,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.76,"AlloprofRetrieval":18.9,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":14.81,"SyntecRetrieval":49.69,"XPQARetrieval (fr)":40.4} -{"level_0":43,"index":43,"Rank":44,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":23.44,"AlloprofRetrieval":16.46,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.57,"SyntecRetrieval":55.9,"XPQARetrieval (fr)":41.29} -{"level_0":44,"index":48,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.91,"AlloprofRetrieval":12.37,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":2.78,"SyntecRetrieval":40.57,"XPQARetrieval (fr)":33.82} -{"level_0":45,"index":5,"Rank":46,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":16.19,"AlloprofRetrieval":3.1,"BSARDRetrieval":0.36,"MintakaRetrieval (fr)":6.31,"SyntecRetrieval":28.58,"XPQARetrieval (fr)":42.59} -{"level_0":46,"index":42,"Rank":47,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":13.89,"AlloprofRetrieval":5.51,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":2.87,"SyntecRetrieval":34.95,"XPQARetrieval (fr)":26.12} -{"level_0":47,"index":7,"Rank":48,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":12.94,"AlloprofRetrieval":6.83,"BSARDRetrieval":2.18,"MintakaRetrieval (fr)":1.66,"SyntecRetrieval":27.64,"XPQARetrieval (fr)":26.39} -{"level_0":48,"index":41,"Rank":49,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":8.52,"AlloprofRetrieval":1.63,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.49} -{"level_0":49,"index":18,"Rank":50,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":8.51,"AlloprofRetrieval":1.6,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.46} -{"level_0":50,"index":16,"Rank":51,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":8.5,"AlloprofRetrieval":1.6,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.39} -{"level_0":51,"index":17,"Rank":52,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":8.49,"AlloprofRetrieval":1.61,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"XPQARetrieval (fr)":18.35} -{"level_0":52,"index":49,"Rank":53,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":7.98,"AlloprofRetrieval":1.98,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.48,"SyntecRetrieval":24.45,"XPQARetrieval (fr)":12.98} -{"level_0":53,"index":39,"Rank":54,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":6.73,"AlloprofRetrieval":1.72,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.51,"SyntecRetrieval":22.33,"XPQARetrieval 
(fr)":9.09} -{"level_0":54,"index":38,"Rank":55,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":5.87,"AlloprofRetrieval":1.63,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.58,"SyntecRetrieval":20.56,"XPQARetrieval (fr)":6.59} -{"level_0":55,"index":79,"Rank":56,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":4.14,"AlloprofRetrieval":0.52,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.9,"SyntecRetrieval":6.6,"XPQARetrieval (fr)":12.7} -{"level_0":56,"index":78,"Rank":57,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":3.2,"AlloprofRetrieval":0.16,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.88,"SyntecRetrieval":3.33,"XPQARetrieval (fr)":11.65} -{"level_0":57,"index":40,"Rank":58,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":1.22,"AlloprofRetrieval":0.58,"BSARDRetrieval":0.0,"MintakaRetrieval (fr)":0.26,"SyntecRetrieval":1.58,"XPQARetrieval (fr)":3.69} -{"level_0":58,"index":10,"Rank":60,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AlloprofRetrieval":58.5,"BSARDRetrieval":28.52,"MintakaRetrieval (fr)":62.53,"SyntecRetrieval":90.37,"XPQARetrieval (fr)":""} -{"level_0":59,"index":58,"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AlloprofRetrieval":33.2,"BSARDRetrieval":"","MintakaRetrieval (fr)":"","SyntecRetrieval":"","XPQARetrieval (fr)":55.9} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":44.23,"AlloprofRetrieval":38.15,"AlloprofRetrieval (fra-Latn)":39.34,"BSARDRetrieval":0.27,"BSARDRetrieval (fra-Latn)":21.28,"MintakaRetrieval (fr)":25.2,"SyntecRetrieval":81.07,"SyntecRetrieval (fra-Latn)":82.39,"XPQARetrieval (fr)":66.15} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.76,"AlloprofRetrieval":36.21,"AlloprofRetrieval (fra-Latn)":34.45,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":18.83,"MintakaRetrieval (fr)":23.46,"SyntecRetrieval":80.49,"SyntecRetrieval (fra-Latn)":82.86,"XPQARetrieval (fr)":65.81} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.27,"AlloprofRetrieval":27.01,"AlloprofRetrieval (fra-Latn)":27.38,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":14.54,"MintakaRetrieval (fr)":22.53,"SyntecRetrieval":75.76,"SyntecRetrieval (fra-Latn)":73.46,"XPQARetrieval (fr)":57.47} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":37.18,"AlloprofRetrieval":30.8,"AlloprofRetrieval (fra-Latn)":30.8,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":13.19,"MintakaRetrieval (fr)":24.45,"SyntecRetrieval":76.0,"SyntecRetrieval (fra-Latn)":76.0,"XPQARetrieval (fr)":46.22} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":32.25,"AlloprofRetrieval":26.63,"AlloprofRetrieval (fra-Latn)":26.63,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":9.6,"MintakaRetrieval (fr)":21.53,"SyntecRetrieval":65.54,"SyntecRetrieval (fra-Latn)":65.54,"XPQARetrieval (fr)":42.51} 
+{"Rank":6,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":30.36,"AlloprofRetrieval":28.41,"AlloprofRetrieval (fra-Latn)":28.41,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":4.8,"MintakaRetrieval (fr)":9.19,"SyntecRetrieval":60.15,"SyntecRetrieval (fra-Latn)":60.15,"XPQARetrieval (fr)":51.79} +{"Rank":7,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":27.73,"AlloprofRetrieval":19.77,"AlloprofRetrieval (fra-Latn)":19.77,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":4.44,"MintakaRetrieval (fr)":15.53,"SyntecRetrieval":55.31,"SyntecRetrieval (fra-Latn)":55.31,"XPQARetrieval (fr)":51.74} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":56.84,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":2.48,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.73,"SyntecRetrieval":78.77,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":74.24} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":45.5,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.15,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":15.51,"SyntecRetrieval":75.83,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":67.07} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":52.61,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.29,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":19.05,"SyntecRetrieval":82.77,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":71.95} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":57.28,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":11.83,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":34.92,"SyntecRetrieval":87.33,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":73.56} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":58.27,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":5.14,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":49.19,"SyntecRetrieval":87.28,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":72.92} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AlloprofRetrieval":3.1,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.36,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":6.31,"SyntecRetrieval":28.58,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":42.59} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":35.39,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":23.0,"SyntecRetrieval":76.88,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":45.23} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":38.36,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.14,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":25.44,"SyntecRetrieval":79.27,"SyntecRetrieval 
(fra-Latn)":null,"XPQARetrieval (fr)":58.87} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.6,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.39} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.61,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.35} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.6,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.46} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":55.42,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":26.63,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":89.48,"XPQARetrieval (fr)":null} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AlloprofRetrieval":29.97,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.31,"SyntecRetrieval":74.2,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":58.57} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":21.94,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval 
(fra-Latn)":null,"MintakaRetrieval (fr)":13.36,"SyntecRetrieval":68.62,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":57.92} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AlloprofRetrieval":31.62,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.87,"SyntecRetrieval":81.11,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":65.62} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.63,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.58,"SyntecRetrieval":20.56,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":6.59} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AlloprofRetrieval":1.72,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.51,"SyntecRetrieval":22.33,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":9.09} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AlloprofRetrieval":0.58,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.26,"SyntecRetrieval":1.58,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":3.69} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AlloprofRetrieval":1.63,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.55,"SyntecRetrieval":18.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":18.49} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":5.51,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":2.87,"SyntecRetrieval":34.95,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":26.12} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AlloprofRetrieval":16.46,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":3.57,"SyntecRetrieval":55.9,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":41.29} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"AlloprofRetrieval":12.37,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":2.78,"SyntecRetrieval":40.57,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":33.82} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":1.98,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.48,"SyntecRetrieval":24.45,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":12.98} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AlloprofRetrieval":33.2,"AlloprofRetrieval (fra-Latn)":33.2,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":6.24,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":60.8,"XPQARetrieval (fr)":55.9} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":34.27,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":6.98,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":57.39,"XPQARetrieval (fr)":null} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AlloprofRetrieval":26.99,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":22.55,"SyntecRetrieval":65.34,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":51.2} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AlloprofRetrieval":30.23,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":16.31,"SyntecRetrieval":58.07,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":48.83} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million 
Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AlloprofRetrieval":27.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.16,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":21.04,"SyntecRetrieval":67.0,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":45.19} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AlloprofRetrieval":34.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":23.92,"SyntecRetrieval":71.05,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":48.79} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AlloprofRetrieval":40.38,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.14,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":31.54,"SyntecRetrieval":74.24,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":52.14} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AlloprofRetrieval":45.75,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":3.33,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":34.93,"SyntecRetrieval":78.97,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":56.2} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":18.9,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":14.81,"SyntecRetrieval":49.69,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":40.4} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":35.27,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":26.12,"SyntecRetrieval":69.82,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":59.59} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":33.78,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":26.21,"SyntecRetrieval":63.69,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":65.21} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AlloprofRetrieval":0.16,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.88,"SyntecRetrieval":3.33,"SyntecRetrieval 
(fra-Latn)":null,"XPQARetrieval (fr)":11.65} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AlloprofRetrieval":0.52,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.0,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":0.9,"SyntecRetrieval":6.6,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":12.7} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":51.64,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":0.61,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":29.94,"SyntecRetrieval":85.97,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":73.0} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AlloprofRetrieval":null,"AlloprofRetrieval (fra-Latn)":null,"BSARDRetrieval":null,"BSARDRetrieval (fra-Latn)":null,"MintakaRetrieval (fr)":null,"SyntecRetrieval":null,"SyntecRetrieval (fra-Latn)":null,"XPQARetrieval (fr)":null} diff --git a/boards_data/fr/data_tasks/STS/default.jsonl b/boards_data/fr/data_tasks/STS/default.jsonl index 00f834f4ee12acfd5fcc49c30ba72aae733570b5..93c77c9db4b7dcfccad8229ab83b869760b36098 100644 --- a/boards_data/fr/data_tasks/STS/default.jsonl +++ b/boards_data/fr/data_tasks/STS/default.jsonl @@ -1,74 +1,57 @@ -{"level_0":0,"index":22,"Rank":1,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.62,"STS22 (fr)":84.64,"STSBenchmarkMultilingualSTS (fr)":87.02,"SICKFr":79.2} -{"level_0":1,"index":23,"Rank":2,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":83.08,"STS22 (fr)":82.84,"STSBenchmarkMultilingualSTS (fr)":86.59,"SICKFr":79.81} -{"level_0":2,"index":10,"Rank":3,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.59,"STS22 (fr)":83.28,"STSBenchmarkMultilingualSTS (fr)":85.09,"SICKFr":79.39} -{"level_0":3,"index":34,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.31,"STS22 (fr)":82.58,"STSBenchmarkMultilingualSTS (fr)":85.46,"SICKFr":78.9} -{"level_0":4,"index":56,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.31,"STS22 (fr)":82.58,"STSBenchmarkMultilingualSTS (fr)":85.46,"SICKFr":78.9} -{"level_0":5,"index":9,"Rank":6,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":82.31,"STS22 (fr)":82.58,"STSBenchmarkMultilingualSTS (fr)":85.46,"SICKFr":78.9} -{"level_0":6,"index":36,"Rank":7,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":81.74,"STS22 (fr)":81.73,"STSBenchmarkMultilingualSTS (fr)":85.79,"SICKFr":77.7} -{"level_0":7,"index":15,"Rank":8,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.28,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":81.84,"SICKFr":79.23} -{"level_0":8,"index":55,"Rank":9,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.26,"STS22 (fr)":81.84,"STSBenchmarkMultilingualSTS 
(fr)":82.25,"SICKFr":79.68} -{"level_0":9,"index":8,"Rank":10,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.26,"STS22 (fr)":81.84,"STSBenchmarkMultilingualSTS (fr)":82.25,"SICKFr":79.68} -{"level_0":10,"index":80,"Rank":11,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.59,"STS22 (fr)":80.31,"STSBenchmarkMultilingualSTS (fr)":84.36,"SICKFr":77.1} -{"level_0":11,"index":3,"Rank":12,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.29,"STS22 (fr)":83.75,"STSBenchmarkMultilingualSTS (fr)":83.02,"SICKFr":74.09} -{"level_0":12,"index":4,"Rank":13,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.13,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":82.72,"SICKFr":74.9} -{"level_0":13,"index":21,"Rank":14,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.12,"STS22 (fr)":81.14,"STSBenchmarkMultilingualSTS (fr)":83.75,"SICKFr":75.48} -{"level_0":14,"index":29,"Rank":15,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.01,"STS22 (fr)":83.31,"STSBenchmarkMultilingualSTS (fr)":79.99,"SICKFr":76.74} -{"level_0":15,"index":0,"Rank":16,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.56,"STS22 (fr)":82.74,"STSBenchmarkMultilingualSTS (fr)":79.72,"SICKFr":76.21} -{"level_0":16,"index":50,"Rank":17,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.5,"STS22 (fr)":81.7,"STSBenchmarkMultilingualSTS (fr)":81.3,"SICKFr":75.51} -{"level_0":17,"index":45,"Rank":18,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":79.37,"STS22 (fr)":76.79,"STSBenchmarkMultilingualSTS (fr)":82.53,"SICKFr":78.78} -{"level_0":18,"index":24,"Rank":19,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.32,"STS22 (fr)":82.57,"STSBenchmarkMultilingualSTS (fr)":82.51,"SICKFr":72.88} -{"level_0":19,"index":72,"Rank":20,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":78.37,"STS22 (fr)":76.8,"STSBenchmarkMultilingualSTS (fr)":81.24,"SICKFr":77.07} -{"level_0":20,"index":81,"Rank":21,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.31,"STS22 (fr)":81.09,"STSBenchmarkMultilingualSTS (fr)":77.55,"SICKFr":76.28} -{"level_0":21,"index":75,"Rank":22,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.28,"STS22 (fr)":74.1,"STSBenchmarkMultilingualSTS (fr)":83.48,"SICKFr":77.25} -{"level_0":22,"index":14,"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.26,"STS22 (fr)":82.8,"STSBenchmarkMultilingualSTS (fr)":76.48,"SICKFr":75.5} -{"level_0":23,"index":68,"Rank":24,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":78.18,"STS22 (fr)":74.3,"STSBenchmarkMultilingualSTS (fr)":84.69,"SICKFr":75.56} 
-{"level_0":24,"index":35,"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":77.79,"STS22 (fr)":77.54,"STSBenchmarkMultilingualSTS (fr)":81.64,"SICKFr":74.18} -{"level_0":25,"index":54,"Rank":26,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.72,"STS22 (fr)":82.35,"STSBenchmarkMultilingualSTS (fr)":79.22,"SICKFr":71.6} -{"level_0":26,"index":53,"Rank":27,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.64,"STS22 (fr)":80.8,"STSBenchmarkMultilingualSTS (fr)":80.23,"SICKFr":71.89} -{"level_0":27,"index":2,"Rank":28,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.52,"STS22 (fr)":79.99,"STSBenchmarkMultilingualSTS (fr)":79.02,"SICKFr":73.56} -{"level_0":28,"index":71,"Rank":29,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":77.33,"STS22 (fr)":77.49,"STSBenchmarkMultilingualSTS (fr)":79.42,"SICKFr":75.08} -{"level_0":29,"index":44,"Rank":30,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":77.22,"STS22 (fr)":74.8,"STSBenchmarkMultilingualSTS (fr)":80.62,"SICKFr":76.23} -{"level_0":30,"index":47,"Rank":31,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":77.17,"STS22 (fr)":76.58,"STSBenchmarkMultilingualSTS (fr)":79.32,"SICKFr":75.62} -{"level_0":31,"index":30,"Rank":32,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":75.87,"STS22 (fr)":78.77,"STSBenchmarkMultilingualSTS (fr)":79.23,"SICKFr":69.6} -{"level_0":32,"index":63,"Rank":33,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":75.46,"STS22 (fr)":76.41,"STSBenchmarkMultilingualSTS (fr)":77.49,"SICKFr":72.49} -{"level_0":33,"index":67,"Rank":34,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":75.18,"STS22 (fr)":70.55,"STSBenchmarkMultilingualSTS (fr)":79.9,"SICKFr":75.1} -{"level_0":34,"index":70,"Rank":35,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":75.14,"STS22 (fr)":75.01,"STSBenchmarkMultilingualSTS (fr)":77.59,"SICKFr":72.83} -{"level_0":35,"index":76,"Rank":36,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.92,"STS22 (fr)":77.91,"STSBenchmarkMultilingualSTS (fr)":75.48,"SICKFr":71.37} -{"level_0":36,"index":52,"Rank":37,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.91,"STS22 (fr)":78.68,"STSBenchmarkMultilingualSTS (fr)":76.38,"SICKFr":69.67} -{"level_0":37,"index":77,"Rank":38,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.55,"STS22 (fr)":71.11,"STSBenchmarkMultilingualSTS (fr)":78.16,"SICKFr":74.39} -{"level_0":38,"index":69,"Rank":39,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":74.49,"STS22 (fr)":77.69,"STSBenchmarkMultilingualSTS 
(fr)":74.04,"SICKFr":71.74} -{"level_0":39,"index":57,"Rank":40,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":74.33,"STS22 (fr)":77.95,"STSBenchmarkMultilingualSTS (fr)":75.1,"SICKFr":69.94} -{"level_0":40,"index":51,"Rank":41,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.28,"STS22 (fr)":75.66,"STSBenchmarkMultilingualSTS (fr)":71.13,"SICKFr":70.04} -{"level_0":41,"index":1,"Rank":42,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.82,"STS22 (fr)":70.51,"STSBenchmarkMultilingualSTS (fr)":76.43,"SICKFr":68.51} -{"level_0":42,"index":59,"Rank":43,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":68.14,"STS22 (fr)":77.0,"STSBenchmarkMultilingualSTS (fr)":64.93,"SICKFr":62.48} -{"level_0":43,"index":66,"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":66.86,"STS22 (fr)":74.62,"STSBenchmarkMultilingualSTS (fr)":63.85,"SICKFr":62.11} -{"level_0":44,"index":43,"Rank":45,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":65.36,"STS22 (fr)":69.82,"STSBenchmarkMultilingualSTS (fr)":61.87,"SICKFr":64.39} -{"level_0":45,"index":5,"Rank":46,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":64.46,"STS22 (fr)":58.61,"STSBenchmarkMultilingualSTS (fr)":69.82,"SICKFr":64.95} -{"level_0":46,"index":48,"Rank":47,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.34,"STS22 (fr)":77.1,"STSBenchmarkMultilingualSTS (fr)":49.97,"SICKFr":59.94} -{"level_0":47,"index":7,"Rank":48,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.33,"STS22 (fr)":67.83,"STSBenchmarkMultilingualSTS (fr)":51.98,"SICKFr":58.18} -{"level_0":48,"index":42,"Rank":49,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":56.57,"STS22 (fr)":56.47,"STSBenchmarkMultilingualSTS (fr)":54.97,"SICKFr":58.26} -{"level_0":49,"index":38,"Rank":50,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":52.12,"STS22 (fr)":65.37,"STSBenchmarkMultilingualSTS (fr)":37.14,"SICKFr":53.86} -{"level_0":50,"index":49,"Rank":51,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.89,"STS22 (fr)":61.35,"STSBenchmarkMultilingualSTS (fr)":36.78,"SICKFr":54.54} -{"level_0":51,"index":78,"Rank":52,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":50.52,"STS22 (fr)":56.72,"STSBenchmarkMultilingualSTS (fr)":46.23,"SICKFr":48.62} -{"level_0":52,"index":17,"Rank":53,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":50.47,"STS22 (fr)":40.4,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.77} -{"level_0":53,"index":16,"Rank":54,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":50.44,"STS22 (fr)":40.31,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76} 
-{"level_0":54,"index":41,"Rank":55,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":50.02,"STS22 (fr)":39.05,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.75} -{"level_0":55,"index":18,"Rank":56,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":49.93,"STS22 (fr)":38.77,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76} -{"level_0":56,"index":79,"Rank":57,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":49.27,"STS22 (fr)":55.49,"STSBenchmarkMultilingualSTS (fr)":42.32,"SICKFr":50.01} -{"level_0":57,"index":39,"Rank":58,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":43.49,"STS22 (fr)":55.15,"STSBenchmarkMultilingualSTS (fr)":33.41,"SICKFr":41.9} -{"level_0":58,"index":40,"Rank":59,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":32.93,"STS22 (fr)":48.52,"STSBenchmarkMultilingualSTS (fr)":15.66,"SICKFr":34.6} -{"level_0":59,"index":20,"Rank":65,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":79.88,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":60,"index":25,"Rank":66,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":72.79,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":61,"index":26,"Rank":67,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":67.66,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":62,"index":27,"Rank":68,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":54.56,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":63,"index":31,"Rank":70,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":81.47,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":64,"index":32,"Rank":71,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":73.13,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":65,"index":33,"Rank":72,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":80.38,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":66,"index":37,"Rank":73,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":61.72,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":67,"index":58,"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS22 (fr)":69.51,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":68,"index":61,"Rank":77,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS22 (fr)":53.92,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":69,"index":62,"Rank":78,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, 
fp32)":0.5,"Average":"","STS22 (fr)":49.43,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":70,"index":64,"Rank":79,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS22 (fr)":78.7,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":71,"index":65,"Rank":80,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","STS22 (fr)":79.43,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":72,"index":73,"Rank":81,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":42.0,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} -{"level_0":73,"index":74,"Rank":82,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS22 (fr)":74.1,"STSBenchmarkMultilingualSTS (fr)":"","SICKFr":""} +{"Rank":1,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":77.53,"STS22 (fr)":74.3,"STSBenchmarkMultilingualSTS (fr)":84.69,"SICKFr":75.56,"SICKFr (fra-Latn)":75.56} +{"Rank":2,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":75.16,"STS22 (fr)":70.55,"STSBenchmarkMultilingualSTS (fr)":79.9,"SICKFr":75.1,"SICKFr (fra-Latn)":75.1} +{"Rank":3,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":73.23,"STS22 (fr)":77.95,"STSBenchmarkMultilingualSTS (fr)":75.1,"SICKFr":69.94,"SICKFr (fra-Latn)":69.94} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":66.72,"STS22 (fr)":77.0,"STSBenchmarkMultilingualSTS (fr)":64.93,"SICKFr":62.48,"SICKFr (fra-Latn)":62.48} +{"Rank":5,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.74,"STSBenchmarkMultilingualSTS (fr)":79.72,"SICKFr":76.21,"SICKFr (fra-Latn)":null} +{"Rank":6,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":70.51,"STSBenchmarkMultilingualSTS (fr)":76.43,"SICKFr":68.51,"SICKFr (fra-Latn)":null} +{"Rank":7,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":79.99,"STSBenchmarkMultilingualSTS (fr)":79.02,"SICKFr":73.56,"SICKFr (fra-Latn)":null} +{"Rank":8,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":83.75,"STSBenchmarkMultilingualSTS (fr)":83.02,"SICKFr":74.09,"SICKFr (fra-Latn)":null} +{"Rank":9,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":82.72,"SICKFr":74.9,"SICKFr (fra-Latn)":null} +{"Rank":10,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"STS22 (fr)":58.61,"STSBenchmarkMultilingualSTS (fr)":69.82,"SICKFr":64.95,"SICKFr (fra-Latn)":null} +{"Rank":11,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.8,"STSBenchmarkMultilingualSTS (fr)":76.48,"SICKFr":75.5,"SICKFr (fra-Latn)":null} +{"Rank":12,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":82.76,"STSBenchmarkMultilingualSTS (fr)":81.84,"SICKFr":79.23,"SICKFr (fra-Latn)":null} +{"Rank":13,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":14,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":15,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":16,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":40.31,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76,"SICKFr (fra-Latn)":null} +{"Rank":17,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":40.4,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.77,"SICKFr (fra-Latn)":null} +{"Rank":18,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":38.77,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.76,"SICKFr (fra-Latn)":null} +{"Rank":19,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":76.91} +{"Rank":20,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"STS22 (fr)":78.77,"STSBenchmarkMultilingualSTS (fr)":79.23,"SICKFr":69.6,"SICKFr (fra-Latn)":null} +{"Rank":21,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":22,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":77.54,"STSBenchmarkMultilingualSTS (fr)":81.64,"SICKFr":74.18,"SICKFr (fra-Latn)":null} +{"Rank":23,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"STS22 (fr)":81.73,"STSBenchmarkMultilingualSTS (fr)":85.79,"SICKFr":77.7,"SICKFr (fra-Latn)":null} +{"Rank":24,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":25,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":26,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"STS22 (fr)":65.37,"STSBenchmarkMultilingualSTS (fr)":37.14,"SICKFr":53.86,"SICKFr (fra-Latn)":null} +{"Rank":27,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":null,"STS22 (fr)":55.15,"STSBenchmarkMultilingualSTS (fr)":33.41,"SICKFr":41.9,"SICKFr (fra-Latn)":null} +{"Rank":28,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"STS22 (fr)":48.52,"STSBenchmarkMultilingualSTS (fr)":15.66,"SICKFr":34.6,"SICKFr (fra-Latn)":null} +{"Rank":29,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"STS22 (fr)":39.05,"STSBenchmarkMultilingualSTS (fr)":52.25,"SICKFr":58.75,"SICKFr (fra-Latn)":null} +{"Rank":30,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"STS22 (fr)":56.47,"STSBenchmarkMultilingualSTS (fr)":54.97,"SICKFr":58.26,"SICKFr (fra-Latn)":null} +{"Rank":31,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"STS22 (fr)":69.82,"STSBenchmarkMultilingualSTS (fr)":61.87,"SICKFr":64.39,"SICKFr (fra-Latn)":null} +{"Rank":32,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":80.62,"SICKFr":76.23,"SICKFr (fra-Latn)":75.76} +{"Rank":33,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":82.53,"SICKFr":78.78,"SICKFr (fra-Latn)":78.81} +{"Rank":34,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":79.32,"SICKFr":75.62,"SICKFr (fra-Latn)":74.67} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":77.1,"STSBenchmarkMultilingualSTS (fr)":49.97,"SICKFr":59.94,"SICKFr (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":61.35,"STSBenchmarkMultilingualSTS (fr)":36.78,"SICKFr":54.54,"SICKFr (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"STS22 (fr)":69.51,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":63.16} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":67.05} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":53.92,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"STS22 (fr)":49.43,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"STS22 (fr)":76.41,"STSBenchmarkMultilingualSTS (fr)":77.49,"SICKFr":72.49,"SICKFr (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"STS22 (fr)":78.7,"STSBenchmarkMultilingualSTS 
(fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"STS22 (fr)":79.43,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"STS22 (fr)":74.62,"STSBenchmarkMultilingualSTS (fr)":63.85,"SICKFr":62.11,"SICKFr (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"STS22 (fr)":77.69,"STSBenchmarkMultilingualSTS (fr)":74.04,"SICKFr":71.74,"SICKFr (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"STS22 (fr)":75.01,"STSBenchmarkMultilingualSTS (fr)":77.59,"SICKFr":72.83,"SICKFr (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"STS22 (fr)":77.49,"STSBenchmarkMultilingualSTS (fr)":79.42,"SICKFr":75.08,"SICKFr (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"STS22 (fr)":76.8,"STSBenchmarkMultilingualSTS (fr)":81.24,"SICKFr":77.07,"SICKFr (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":74.1,"STSBenchmarkMultilingualSTS (fr)":83.48,"SICKFr":77.25,"SICKFr (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":77.91,"STSBenchmarkMultilingualSTS (fr)":75.48,"SICKFr":71.37,"SICKFr (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":71.11,"STSBenchmarkMultilingualSTS (fr)":78.16,"SICKFr":74.39,"SICKFr (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"STS22 (fr)":56.72,"STSBenchmarkMultilingualSTS (fr)":46.23,"SICKFr":48.62,"SICKFr (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"STS22 (fr)":55.49,"STSBenchmarkMultilingualSTS (fr)":42.32,"SICKFr":50.01,"SICKFr (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":81.09,"STSBenchmarkMultilingualSTS (fr)":77.55,"SICKFr":76.28,"SICKFr (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"STS22 (fr)":null,"STSBenchmarkMultilingualSTS (fr)":null,"SICKFr":null,"SICKFr (fra-Latn)":null} diff --git 
a/boards_data/fr/data_tasks/Summarization/default.jsonl b/boards_data/fr/data_tasks/Summarization/default.jsonl index 1a54dfa55ae49a7b95992942cd97ee6ab990e5d0..bf31ada5bfb2c7c32e3fa192789053a7d00962fd 100644 --- a/boards_data/fr/data_tasks/Summarization/default.jsonl +++ b/boards_data/fr/data_tasks/Summarization/default.jsonl @@ -1,59 +1,57 @@ -{"level_0":0,"index":53,"Rank":1,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":32.46} -{"level_0":1,"index":43,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEvalFr":32.22} -{"level_0":2,"index":47,"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEvalFr":31.85} -{"level_0":3,"index":23,"Rank":4,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.62} -{"level_0":4,"index":71,"Rank":5,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEvalFr":31.59} -{"level_0":5,"index":5,"Rank":6,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"SummEvalFr":31.56} -{"level_0":6,"index":24,"Rank":7,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.55} -{"level_0":7,"index":0,"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.47} -{"level_0":8,"index":34,"Rank":9,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.45} -{"level_0":9,"index":56,"Rank":10,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.45} -{"level_0":10,"index":9,"Rank":11,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"SummEvalFr":31.45} -{"level_0":11,"index":14,"Rank":12,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.4} -{"level_0":12,"index":15,"Rank":13,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.26} -{"level_0":13,"index":38,"Rank":14,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":31.26} -{"level_0":14,"index":10,"Rank":15,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":31.26} -{"level_0":15,"index":45,"Rank":16,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEvalFr":30.92} -{"level_0":16,"index":1,"Rank":17,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.88} -{"level_0":17,"index":36,"Rank":18,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"SummEvalFr":30.88} -{"level_0":18,"index":44,"Rank":19,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEvalFr":30.76} -{"level_0":19,"index":42,"Rank":20,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, 
fp32)":0.63,"SummEvalFr":30.72} -{"level_0":20,"index":8,"Rank":21,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.5} -{"level_0":21,"index":55,"Rank":22,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.5} -{"level_0":22,"index":81,"Rank":23,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.5} -{"level_0":23,"index":72,"Rank":24,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEvalFr":30.39} -{"level_0":24,"index":3,"Rank":25,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.34} -{"level_0":25,"index":22,"Rank":26,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.3} -{"level_0":26,"index":51,"Rank":27,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.28} -{"level_0":27,"index":52,"Rank":28,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.27} -{"level_0":28,"index":70,"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEvalFr":30.23} -{"level_0":29,"index":54,"Rank":30,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.22} -{"level_0":30,"index":21,"Rank":31,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.16} -{"level_0":31,"index":57,"Rank":32,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"SummEvalFr":30.16} -{"level_0":32,"index":50,"Rank":33,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":30.13} -{"level_0":33,"index":69,"Rank":34,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEvalFr":30.01} -{"level_0":34,"index":4,"Rank":35,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.96} -{"level_0":35,"index":29,"Rank":36,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.69} -{"level_0":36,"index":80,"Rank":37,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.65} -{"level_0":37,"index":48,"Rank":38,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.48} -{"level_0":38,"index":68,"Rank":39,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEvalFr":29.47} -{"level_0":39,"index":39,"Rank":40,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":29.43} -{"level_0":40,"index":75,"Rank":41,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":29.33} -{"level_0":41,"index":40,"Rank":42,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, 
fp32)":1.39,"SummEvalFr":29.25} -{"level_0":42,"index":67,"Rank":43,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEvalFr":29.2} -{"level_0":43,"index":78,"Rank":44,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"SummEvalFr":29.14} -{"level_0":44,"index":17,"Rank":45,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":29.13} -{"level_0":45,"index":16,"Rank":46,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":29.06} -{"level_0":46,"index":30,"Rank":47,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEvalFr":29.04} -{"level_0":47,"index":79,"Rank":48,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEvalFr":28.89} -{"level_0":48,"index":18,"Rank":49,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEvalFr":28.84} -{"level_0":49,"index":41,"Rank":50,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"SummEvalFr":28.81} -{"level_0":50,"index":35,"Rank":51,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEvalFr":28.77} -{"level_0":51,"index":7,"Rank":52,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.72} -{"level_0":52,"index":77,"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.56} -{"level_0":53,"index":2,"Rank":54,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.34} -{"level_0":54,"index":59,"Rank":55,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEvalFr":28.28} -{"level_0":55,"index":76,"Rank":56,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":28.21} -{"level_0":56,"index":63,"Rank":57,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"SummEvalFr":28.12} -{"level_0":57,"index":66,"Rank":58,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEvalFr":27.59} -{"level_0":58,"index":49,"Rank":59,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEvalFr":23.63} +{"Rank":1,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":31.86,"SummEvalFr":30.76,"SummEvalFr (fra-Latn)":32.96} +{"Rank":2,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.5,"SummEvalFr":31.85,"SummEvalFr (fra-Latn)":31.14} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":30.92,"SummEvalFr":30.92,"SummEvalFr (fra-Latn)":30.92} +{"Rank":4,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":30.16,"SummEvalFr":30.16,"SummEvalFr (fra-Latn)":30.16} 
+{"Rank":5,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":29.47,"SummEvalFr":29.47,"SummEvalFr (fra-Latn)":29.47} +{"Rank":6,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":29.2,"SummEvalFr":29.2,"SummEvalFr (fra-Latn)":29.2} +{"Rank":7,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.28,"SummEvalFr":28.28,"SummEvalFr (fra-Latn)":28.29} +{"Rank":8,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.47,"SummEvalFr (fra-Latn)":null} +{"Rank":9,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.88,"SummEvalFr (fra-Latn)":null} +{"Rank":10,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.34,"SummEvalFr (fra-Latn)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.34,"SummEvalFr (fra-Latn)":null} +{"Rank":12,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.96,"SummEvalFr (fra-Latn)":null} +{"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"SummEvalFr":31.56,"SummEvalFr (fra-Latn)":null} +{"Rank":14,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.4,"SummEvalFr (fra-Latn)":null} +{"Rank":15,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":31.26,"SummEvalFr (fra-Latn)":null} +{"Rank":16,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":17,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":19,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.06,"SummEvalFr (fra-Latn)":null} +{"Rank":20,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.13,"SummEvalFr (fra-Latn)":null} +{"Rank":21,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":28.84,"SummEvalFr (fra-Latn)":null} +{"Rank":22,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":29.97} +{"Rank":23,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"SummEvalFr":29.04,"SummEvalFr (fra-Latn)":null} +{"Rank":24,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, 
fp32)":0.48,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":25,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":28.77,"SummEvalFr (fra-Latn)":null} +{"Rank":26,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"SummEvalFr":30.88,"SummEvalFr (fra-Latn)":null} +{"Rank":27,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":28,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":29,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":31.26,"SummEvalFr (fra-Latn)":null} +{"Rank":30,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"SummEvalFr":29.43,"SummEvalFr (fra-Latn)":null} +{"Rank":31,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"SummEvalFr":29.25,"SummEvalFr (fra-Latn)":null} +{"Rank":32,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"SummEvalFr":28.81,"SummEvalFr (fra-Latn)":null} +{"Rank":33,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":30.72,"SummEvalFr (fra-Latn)":null} +{"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"SummEvalFr":32.22,"SummEvalFr (fra-Latn)":null} +{"Rank":35,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.48,"SummEvalFr (fra-Latn)":null} +{"Rank":36,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":23.63,"SummEvalFr (fra-Latn)":null} +{"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":26.63} +{"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":28.11} +{"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":40,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":41,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"SummEvalFr":28.12,"SummEvalFr (fra-Latn)":null} +{"Rank":42,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":43,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million 
Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"SummEvalFr":27.59,"SummEvalFr (fra-Latn)":null} +{"Rank":45,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"SummEvalFr":30.01,"SummEvalFr (fra-Latn)":null} +{"Rank":46,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"SummEvalFr":30.23,"SummEvalFr (fra-Latn)":null} +{"Rank":47,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"SummEvalFr":31.59,"SummEvalFr (fra-Latn)":null} +{"Rank":48,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"SummEvalFr":30.39,"SummEvalFr (fra-Latn)":null} +{"Rank":49,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} +{"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":29.33,"SummEvalFr (fra-Latn)":null} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.21,"SummEvalFr (fra-Latn)":null} +{"Rank":53,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":28.56,"SummEvalFr (fra-Latn)":null} +{"Rank":54,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"SummEvalFr":29.14,"SummEvalFr (fra-Latn)":null} +{"Rank":55,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"SummEvalFr":28.89,"SummEvalFr (fra-Latn)":null} +{"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":30.5,"SummEvalFr (fra-Latn)":null} +{"Rank":57,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"SummEvalFr":null,"SummEvalFr (fra-Latn)":null} diff --git a/boards_data/instructions/data_tasks/InstructionRetrieval/default.jsonl b/boards_data/instructions/data_tasks/InstructionRetrieval/default.jsonl index d28caf35f5a53123a73f357393e31be7eea63db1..fb7dfe5e3172f53849b302706f2add0f6315f1a1 100644 --- a/boards_data/instructions/data_tasks/InstructionRetrieval/default.jsonl +++ b/boards_data/instructions/data_tasks/InstructionRetrieval/default.jsonl @@ -1,21 +1,21 @@ -{"level_0":0,"index":15,"Rank":1,"Model":"FollowIR-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":12.15,"Core17InstructionRetrieval":16.48,"News21InstructionRetrieval":6.26,"Robust04InstructionRetrieval":13.72} -{"level_0":1,"index":17,"Rank":2,"Model":"mistral-7b-instruct-v0.2<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":10.15,"Core17InstructionRetrieval":13.03,"News21InstructionRetrieval":4.81,"Robust04InstructionRetrieval":12.61} -{"level_0":2,"index":9,"Rank":3,"Model":"flan-t5-large<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, 
fp32)":2.87,"Average":4.72,"Core17InstructionRetrieval":1.32,"News21InstructionRetrieval":8.95,"Robust04InstructionRetrieval":3.9} -{"level_0":3,"index":5,"Rank":4,"Model":"monot5-3b-msmarco-10k<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":2.53,"Core17InstructionRetrieval":1.84,"News21InstructionRetrieval":1.78,"Robust04InstructionRetrieval":3.96} -{"level_0":4,"index":0,"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":2.33,"Core17InstructionRetrieval":5.44,"News21InstructionRetrieval":3.94,"Robust04InstructionRetrieval":-2.4} -{"level_0":5,"index":16,"Rank":6,"Model":"llama-2-7b-chat<\/a>","Model Size (Million Parameters)":7000,"Memory Usage (GB, fp32)":26.08,"Average":1.69,"Core17InstructionRetrieval":2.84,"News21InstructionRetrieval":0.23,"Robust04InstructionRetrieval":2.0} -{"level_0":6,"index":7,"Rank":7,"Model":"tart-full-flan-t5-xl<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":1.36,"Core17InstructionRetrieval":2.82,"News21InstructionRetrieval":1.99,"Robust04InstructionRetrieval":-0.72} -{"level_0":7,"index":8,"Rank":8,"Model":"flan-t5-base<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":0.64,"Core17InstructionRetrieval":-3.31,"News21InstructionRetrieval":-0.12,"Robust04InstructionRetrieval":5.35} -{"level_0":8,"index":3,"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":-0.02,"Core17InstructionRetrieval":2.62,"News21InstructionRetrieval":-1.01,"Robust04InstructionRetrieval":-1.68} -{"level_0":9,"index":2,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-0.21,"Core17InstructionRetrieval":2.8,"News21InstructionRetrieval":0.2,"Robust04InstructionRetrieval":-3.63} -{"level_0":10,"index":13,"Rank":11,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":-1.06,"Core17InstructionRetrieval":0.12,"News21InstructionRetrieval":0.87,"Robust04InstructionRetrieval":-4.16} -{"level_0":11,"index":6,"Rank":12,"Model":"monot5-base-msmarco-10k<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":-1.75,"Core17InstructionRetrieval":-4.06,"News21InstructionRetrieval":5.02,"Robust04InstructionRetrieval":-6.2} -{"level_0":12,"index":1,"Rank":13,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.09,"Core17InstructionRetrieval":-1.06,"News21InstructionRetrieval":-2.15,"Robust04InstructionRetrieval":-3.06} -{"level_0":13,"index":20,"Rank":14,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.68,"Core17InstructionRetrieval":-0.2,"News21InstructionRetrieval":-2.03,"Robust04InstructionRetrieval":-5.81} -{"level_0":14,"index":11,"Rank":15,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":-2.76,"Core17InstructionRetrieval":0.69,"News21InstructionRetrieval":-0.9,"Robust04InstructionRetrieval":-8.08} -{"level_0":15,"index":14,"Rank":16,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":-3.45,"Core17InstructionRetrieval":0.09,"News21InstructionRetrieval":-0.86,"Robust04InstructionRetrieval":-9.59} 
-{"level_0":16,"index":4,"Rank":17,"Model":"monobert-large-msmarco<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":-3.47,"Core17InstructionRetrieval":-0.24,"News21InstructionRetrieval":-0.8,"Robust04InstructionRetrieval":-9.36} -{"level_0":17,"index":18,"Rank":18,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.81,"Core17InstructionRetrieval":-2.48,"News21InstructionRetrieval":-2.83,"Robust04InstructionRetrieval":-6.12} -{"level_0":18,"index":12,"Rank":19,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.88,"Core17InstructionRetrieval":-2.9,"News21InstructionRetrieval":-2.0,"Robust04InstructionRetrieval":-6.73} -{"level_0":19,"index":10,"Rank":20,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-4.43,"Core17InstructionRetrieval":-1.09,"News21InstructionRetrieval":-1.78,"Robust04InstructionRetrieval":-10.42} -{"level_0":20,"index":19,"Rank":21,"Model":"tart-dual-contriever-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-5.0,"Core17InstructionRetrieval":-3.04,"News21InstructionRetrieval":-2.98,"Robust04InstructionRetrieval":-8.98} +{"Rank":1,"Model":"FollowIR-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":12.15,"Core17InstructionRetrieval":16.48,"News21InstructionRetrieval":6.26,"Robust04InstructionRetrieval":13.72} +{"Rank":2,"Model":"mistral-7b-instruct-v0.2<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":10.15,"Core17InstructionRetrieval":13.03,"News21InstructionRetrieval":4.81,"Robust04InstructionRetrieval":12.61} +{"Rank":3,"Model":"flan-t5-large<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":4.72,"Core17InstructionRetrieval":1.32,"News21InstructionRetrieval":8.95,"Robust04InstructionRetrieval":3.9} +{"Rank":4,"Model":"monot5-3b-msmarco-10k<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":2.53,"Core17InstructionRetrieval":1.84,"News21InstructionRetrieval":1.78,"Robust04InstructionRetrieval":3.96} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":2.33,"Core17InstructionRetrieval":5.44,"News21InstructionRetrieval":3.94,"Robust04InstructionRetrieval":-2.4} +{"Rank":6,"Model":"llama-2-7b-chat<\/a>","Model Size (Million Parameters)":7000,"Memory Usage (GB, fp32)":26.08,"Average":1.69,"Core17InstructionRetrieval":2.84,"News21InstructionRetrieval":0.23,"Robust04InstructionRetrieval":2.0} +{"Rank":7,"Model":"tart-full-flan-t5-xl<\/a>","Model Size (Million Parameters)":2480,"Memory Usage (GB, fp32)":9.24,"Average":1.36,"Core17InstructionRetrieval":2.82,"News21InstructionRetrieval":1.99,"Robust04InstructionRetrieval":-0.72} +{"Rank":8,"Model":"flan-t5-base<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":0.64,"Core17InstructionRetrieval":-3.31,"News21InstructionRetrieval":-0.12,"Robust04InstructionRetrieval":5.35} +{"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":-0.02,"Core17InstructionRetrieval":2.62,"News21InstructionRetrieval":-1.01,"Robust04InstructionRetrieval":-1.68} +{"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Average":-0.21,"Core17InstructionRetrieval":2.8,"News21InstructionRetrieval":0.2,"Robust04InstructionRetrieval":-3.63} +{"Rank":11,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":-1.06,"Core17InstructionRetrieval":0.12,"News21InstructionRetrieval":0.87,"Robust04InstructionRetrieval":-4.16} +{"Rank":12,"Model":"monot5-base-msmarco-10k<\/a>","Model Size (Million Parameters)":220,"Memory Usage (GB, fp32)":0.82,"Average":-1.75,"Core17InstructionRetrieval":-4.06,"News21InstructionRetrieval":5.02,"Robust04InstructionRetrieval":-6.2} +{"Rank":13,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.09,"Core17InstructionRetrieval":-1.06,"News21InstructionRetrieval":-2.15,"Robust04InstructionRetrieval":-3.06} +{"Rank":14,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":-2.68,"Core17InstructionRetrieval":-0.2,"News21InstructionRetrieval":-2.03,"Robust04InstructionRetrieval":-5.81} +{"Rank":15,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":-2.76,"Core17InstructionRetrieval":0.69,"News21InstructionRetrieval":-0.9,"Robust04InstructionRetrieval":-8.08} +{"Rank":16,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":-3.45,"Core17InstructionRetrieval":0.09,"News21InstructionRetrieval":-0.86,"Robust04InstructionRetrieval":-9.59} +{"Rank":17,"Model":"monobert-large-msmarco<\/a>","Model Size (Million Parameters)":770,"Memory Usage (GB, fp32)":2.87,"Average":-3.47,"Core17InstructionRetrieval":-0.24,"News21InstructionRetrieval":-0.8,"Robust04InstructionRetrieval":-9.36} +{"Rank":18,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.81,"Core17InstructionRetrieval":-2.48,"News21InstructionRetrieval":-2.83,"Robust04InstructionRetrieval":-6.12} +{"Rank":19,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-3.88,"Core17InstructionRetrieval":-2.9,"News21InstructionRetrieval":-2.0,"Robust04InstructionRetrieval":-6.73} +{"Rank":20,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-4.43,"Core17InstructionRetrieval":-1.09,"News21InstructionRetrieval":-1.78,"Robust04InstructionRetrieval":-10.42} +{"Rank":21,"Model":"tart-dual-contriever-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":-5.0,"Core17InstructionRetrieval":-3.04,"News21InstructionRetrieval":-2.98,"Robust04InstructionRetrieval":-8.98} diff --git a/boards_data/law/data_tasks/Retrieval/default.jsonl b/boards_data/law/data_tasks/Retrieval/default.jsonl index 34beafa5997c45e8e8345922090321c6ee232d3f..aa49de0a498f7764414a3ec0e47940e1b74beeda 100644 --- a/boards_data/law/data_tasks/Retrieval/default.jsonl +++ b/boards_data/law/data_tasks/Retrieval/default.jsonl @@ -1,11 +1,17 @@ -{"level_0":0,"index":1,"Rank":1,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.39,"AILACasedocs":44.56,"AILAStatutes":45.51,"GerDaLIRSmall":44.91,"LeCaRDv2":72.75,"LegalBenchConsumerContractsQA":83.27,"LegalBenchCorporateLobbying":95.66,"LegalQuAD":67.47,"LegalSummarization":68.96} -{"level_0":1,"index":5,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory 
Usage (GB, fp32)":26.49,"Average":59.77,"AILACasedocs":38.76,"AILAStatutes":38.07,"GerDaLIRSmall":37.18,"LeCaRDv2":68.56,"LegalBenchConsumerContractsQA":75.46,"LegalBenchCorporateLobbying":94.01,"LegalQuAD":59.64,"LegalSummarization":66.51} -{"level_0":2,"index":10,"Rank":3,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.22,"AILACasedocs":39.0,"AILAStatutes":41.31,"GerDaLIRSmall":32.77,"LeCaRDv2":57.2,"LegalBenchConsumerContractsQA":79.39,"LegalBenchCorporateLobbying":95.09,"LegalQuAD":57.47,"LegalSummarization":71.55} -{"level_0":3,"index":0,"Rank":4,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.43,"AILACasedocs":38.2,"AILAStatutes":44.81,"GerDaLIRSmall":17.85,"LeCaRDv2":61.12,"LegalBenchConsumerContractsQA":80.8,"LegalBenchCorporateLobbying":94.11,"LegalQuAD":47.17,"LegalSummarization":67.39} -{"level_0":4,"index":3,"Rank":5,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.04,"AILACasedocs":31.54,"AILAStatutes":27.15,"GerDaLIRSmall":6.05,"LeCaRDv2":21.02,"LegalBenchConsumerContractsQA":77.12,"LegalBenchCorporateLobbying":93.68,"LegalQuAD":26.08,"LegalSummarization":61.7} -{"level_0":5,"index":2,"Rank":6,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":39.22,"AILACasedocs":25.15,"AILAStatutes":20.74,"GerDaLIRSmall":3.96,"LeCaRDv2":22.68,"LegalBenchConsumerContractsQA":73.52,"LegalBenchCorporateLobbying":91.51,"LegalQuAD":16.22,"LegalSummarization":59.99} -{"level_0":6,"index":4,"Rank":7,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AILACasedocs":35.31,"AILAStatutes":41.8,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":82.1,"LegalBenchCorporateLobbying":95.0,"LegalQuAD":"","LegalSummarization":70.64} -{"level_0":7,"index":6,"Rank":8,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AILACasedocs":16.8,"AILAStatutes":20.71,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":60.21,"LegalBenchCorporateLobbying":88.69,"LegalQuAD":"","LegalSummarization":57.43} -{"level_0":8,"index":7,"Rank":9,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AILACasedocs":19.72,"AILAStatutes":20.52,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":65.6,"LegalBenchCorporateLobbying":86.41,"LegalQuAD":"","LegalSummarization":59.0} -{"level_0":9,"index":8,"Rank":10,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AILACasedocs":22.51,"AILAStatutes":21.27,"GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":75.25,"LegalBenchCorporateLobbying":89.04,"LegalQuAD":"","LegalSummarization":58.55} -{"level_0":10,"index":9,"Rank":11,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":"","AILAStatutes":"","GerDaLIRSmall":"","LeCaRDv2":"","LegalBenchConsumerContractsQA":"","LegalBenchCorporateLobbying":"","LegalQuAD":"","LegalSummarization":""} +{"Rank":1,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":38.2,"AILAStatutes":44.81,"GerDaLIRSmall":17.85,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":61.12,"LeCaRDv2 
(zho-Hans)":"","LegalBenchConsumerContractsQA":80.8,"LegalBenchCorporateLobbying":94.11,"LegalQuAD":47.17,"LegalQuAD (deu-Latn)":"","LegalSummarization":67.39} +{"Rank":2,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":44.56,"AILAStatutes":45.51,"GerDaLIRSmall":44.91,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":72.75,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":83.27,"LegalBenchCorporateLobbying":95.66,"LegalQuAD":67.47,"LegalQuAD (deu-Latn)":"","LegalSummarization":68.96} +{"Rank":3,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":"","AILACasedocs":25.15,"AILAStatutes":20.74,"GerDaLIRSmall":3.96,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":22.68,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":73.52,"LegalBenchCorporateLobbying":91.51,"LegalQuAD":16.22,"LegalQuAD (deu-Latn)":"","LegalSummarization":59.99} +{"Rank":4,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":31.54,"AILAStatutes":27.15,"GerDaLIRSmall":6.05,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":21.02,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":77.12,"LegalBenchCorporateLobbying":93.68,"LegalQuAD":26.08,"LegalQuAD (deu-Latn)":"","LegalSummarization":61.7} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","AILACasedocs":35.31,"AILAStatutes":41.8,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":20.61,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":64.05,"LegalBenchConsumerContractsQA":82.1,"LegalBenchCorporateLobbying":95.0,"LegalQuAD":"","LegalQuAD (deu-Latn)":44.18,"LegalSummarization":70.64} +{"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AILACasedocs":38.76,"AILAStatutes":38.07,"GerDaLIRSmall":37.18,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":68.56,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":75.46,"LegalBenchCorporateLobbying":94.01,"LegalQuAD":59.64,"LegalQuAD (deu-Latn)":"","LegalSummarization":66.51} +{"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AILACasedocs":26.05,"AILAStatutes":20.37,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":15.3,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":59.0,"LegalBenchConsumerContractsQA":69.02,"LegalBenchCorporateLobbying":88.97,"LegalQuAD":"","LegalQuAD (deu-Latn)":47.85,"LegalSummarization":61.69} +{"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AILACasedocs":26.43,"AILAStatutes":20.84,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":15.72,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":55.83,"LegalBenchConsumerContractsQA":73.3,"LegalBenchCorporateLobbying":89.72,"LegalQuAD":"","LegalQuAD (deu-Latn)":43.17,"LegalSummarization":62.1} +{"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AILACasedocs":23.43,"AILAStatutes":19.01,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":14.81,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":61.58,"LegalBenchConsumerContractsQA":66.98,"LegalBenchCorporateLobbying":89.47,"LegalQuAD":"","LegalQuAD (deu-Latn)":47.8,"LegalSummarization":55.76} +{"Rank":10,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Average":"","AILACasedocs":17.67,"AILAStatutes":16.72,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":4.59,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":24.68,"LegalBenchConsumerContractsQA":54.66,"LegalBenchCorporateLobbying":69.39,"LegalQuAD":"","LegalQuAD (deu-Latn)":16.64,"LegalSummarization":53.89} +{"Rank":11,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AILACasedocs":16.8,"AILAStatutes":20.71,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":1.35,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":18.77,"LegalBenchConsumerContractsQA":60.21,"LegalBenchCorporateLobbying":88.69,"LegalQuAD":"","LegalQuAD (deu-Latn)":7.44,"LegalSummarization":57.43} +{"Rank":12,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AILACasedocs":19.72,"AILAStatutes":20.52,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":2.41,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":17.5,"LegalBenchConsumerContractsQA":65.6,"LegalBenchCorporateLobbying":86.41,"LegalQuAD":"","LegalQuAD (deu-Latn)":11.81,"LegalSummarization":59.0} +{"Rank":13,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AILACasedocs":22.51,"AILAStatutes":21.27,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":3.78,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":18.09,"LegalBenchConsumerContractsQA":75.25,"LegalBenchCorporateLobbying":89.04,"LegalQuAD":"","LegalQuAD (deu-Latn)":10.67,"LegalSummarization":58.55} +{"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AILACasedocs":13.66,"AILAStatutes":20.52,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":2.62,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":32.03,"LegalBenchConsumerContractsQA":49.81,"LegalBenchCorporateLobbying":88.51,"LegalQuAD":"","LegalQuAD (deu-Latn)":13.31,"LegalSummarization":54.97} +{"Rank":15,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AILACasedocs":17.45,"AILAStatutes":22.24,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":3.0,"LeCaRDv2":"","LeCaRDv2 (zho-Hans)":33.91,"LegalBenchConsumerContractsQA":52.37,"LegalBenchCorporateLobbying":87.62,"LegalQuAD":"","LegalQuAD (deu-Latn)":17.8,"LegalSummarization":56.8} +{"Rank":16,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","AILACasedocs":7.43,"AILAStatutes":13.62,"GerDaLIRSmall":"","GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":"","LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":"","LegalBenchCorporateLobbying":"","LegalQuAD":"","LegalQuAD (deu-Latn)":"","LegalSummarization":""} +{"Rank":17,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AILACasedocs":39.0,"AILAStatutes":41.31,"GerDaLIRSmall":32.77,"GerDaLIRSmall (deu-Latn)":"","LeCaRDv2":57.2,"LeCaRDv2 (zho-Hans)":"","LegalBenchConsumerContractsQA":79.39,"LegalBenchCorporateLobbying":95.09,"LegalQuAD":57.47,"LegalQuAD (deu-Latn)":"","LegalSummarization":71.55} diff --git a/boards_data/longembed/data_tasks/Retrieval/default.jsonl b/boards_data/longembed/data_tasks/Retrieval/default.jsonl index 378a134995551d6eaf5f4a22067fdcc462af18cb..9deef24bd1b9ecf7a866b50b9aac3c769c1735c3 100644 --- a/boards_data/longembed/data_tasks/Retrieval/default.jsonl +++ b/boards_data/longembed/data_tasks/Retrieval/default.jsonl @@ -1,13 +1,19 @@ 
-{"level_0":0,"index":1,"Rank":1,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.17,"LEMBNarrativeQARetrieval":64.69,"LEMBNeedleRetrieval":75.25,"LEMBPasskeyRetrieval":97.0,"LEMBQMSumRetrieval":51.49,"LEMBSummScreenFDRetrieval":99.11,"LEMBWikimQARetrieval":87.49} -{"level_0":1,"index":0,"Rank":2,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.85,"LEMBNarrativeQARetrieval":55.78,"LEMBNeedleRetrieval":80.5,"LEMBPasskeyRetrieval":93.75,"LEMBQMSumRetrieval":57.26,"LEMBSummScreenFDRetrieval":98.72,"LEMBWikimQARetrieval":87.08} -{"level_0":2,"index":6,"Rank":3,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.4,"LEMBNarrativeQARetrieval":44.62,"LEMBNeedleRetrieval":48.25,"LEMBPasskeyRetrieval":71.0,"LEMBQMSumRetrieval":43.63,"LEMBSummScreenFDRetrieval":96.82,"LEMBWikimQARetrieval":82.11} -{"level_0":3,"index":7,"Rank":4,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":58.12,"LEMBNarrativeQARetrieval":37.89,"LEMBNeedleRetrieval":54.25,"LEMBPasskeyRetrieval":50.25,"LEMBQMSumRetrieval":38.87,"LEMBSummScreenFDRetrieval":93.48,"LEMBWikimQARetrieval":73.99} -{"level_0":4,"index":2,"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":56.56,"LEMBNarrativeQARetrieval":45.76,"LEMBNeedleRetrieval":40.25,"LEMBPasskeyRetrieval":46.0,"LEMBQMSumRetrieval":35.54,"LEMBSummScreenFDRetrieval":94.09,"LEMBWikimQARetrieval":77.73} -{"level_0":5,"index":4,"Rank":6,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.52,"LEMBNarrativeQARetrieval":30.35,"LEMBNeedleRetrieval":41.5,"LEMBPasskeyRetrieval":67.25,"LEMBQMSumRetrieval":35.6,"LEMBSummScreenFDRetrieval":95.23,"LEMBWikimQARetrieval":69.19} -{"level_0":6,"index":8,"Rank":7,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":54.81,"LEMBNarrativeQARetrieval":41.23,"LEMBNeedleRetrieval":39.5,"LEMBPasskeyRetrieval":44.75,"LEMBQMSumRetrieval":36.65,"LEMBSummScreenFDRetrieval":92.97,"LEMBWikimQARetrieval":73.75} -{"level_0":7,"index":12,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.3,"LEMBNarrativeQARetrieval":44.09,"LEMBNeedleRetrieval":29.25,"LEMBPasskeyRetrieval":63.0,"LEMBQMSumRetrieval":32.49,"LEMBSummScreenFDRetrieval":84.8,"LEMBWikimQARetrieval":54.16} -{"level_0":8,"index":3,"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":47.1,"LEMBNarrativeQARetrieval":41.46,"LEMBNeedleRetrieval":33.25,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":30.32,"LEMBSummScreenFDRetrieval":78.49,"LEMBWikimQARetrieval":60.8} -{"level_0":9,"index":5,"Rank":10,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.24,"LEMBNarrativeQARetrieval":25.31,"LEMBNeedleRetrieval":28.5,"LEMBPasskeyRetrieval":33.25,"LEMBQMSumRetrieval":23.83,"LEMBSummScreenFDRetrieval":74.67,"LEMBWikimQARetrieval":55.85} -{"level_0":10,"index":11,"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":31.12,"LEMBNarrativeQARetrieval":19.34,"LEMBNeedleRetrieval":16.0,"LEMBPasskeyRetrieval":24.5,"LEMBQMSumRetrieval":21.54,"LEMBSummScreenFDRetrieval":60.43,"LEMBWikimQARetrieval":44.92} -{"level_0":11,"index":10,"Rank":12,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.81,"LEMBNarrativeQARetrieval":18.27,"LEMBNeedleRetrieval":20.0,"LEMBPasskeyRetrieval":23.25,"LEMBQMSumRetrieval":16.32,"LEMBSummScreenFDRetrieval":54.8,"LEMBWikimQARetrieval":46.23} -{"level_0":12,"index":9,"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.26,"LEMBNarrativeQARetrieval":19.64,"LEMBNeedleRetrieval":12.25,"LEMBPasskeyRetrieval":14.75,"LEMBQMSumRetrieval":13.08,"LEMBSummScreenFDRetrieval":46.98,"LEMBWikimQARetrieval":44.88} +{"Rank":1,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.17,"LEMBNarrativeQARetrieval":64.69,"LEMBNeedleRetrieval":75.25,"LEMBPasskeyRetrieval":97.0,"LEMBQMSumRetrieval":51.49,"LEMBSummScreenFDRetrieval":99.11,"LEMBWikimQARetrieval":87.49} +{"Rank":2,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.85,"LEMBNarrativeQARetrieval":55.78,"LEMBNeedleRetrieval":80.5,"LEMBPasskeyRetrieval":93.75,"LEMBQMSumRetrieval":57.26,"LEMBSummScreenFDRetrieval":98.72,"LEMBWikimQARetrieval":87.08} +{"Rank":3,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.4,"LEMBNarrativeQARetrieval":44.62,"LEMBNeedleRetrieval":48.25,"LEMBPasskeyRetrieval":71.0,"LEMBQMSumRetrieval":43.63,"LEMBSummScreenFDRetrieval":96.82,"LEMBWikimQARetrieval":82.11} +{"Rank":4,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":58.12,"LEMBNarrativeQARetrieval":37.89,"LEMBNeedleRetrieval":54.25,"LEMBPasskeyRetrieval":50.25,"LEMBQMSumRetrieval":38.87,"LEMBSummScreenFDRetrieval":93.48,"LEMBWikimQARetrieval":73.99} +{"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":56.56,"LEMBNarrativeQARetrieval":45.76,"LEMBNeedleRetrieval":40.25,"LEMBPasskeyRetrieval":46.0,"LEMBQMSumRetrieval":35.54,"LEMBSummScreenFDRetrieval":94.09,"LEMBWikimQARetrieval":77.73} +{"Rank":6,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":56.52,"LEMBNarrativeQARetrieval":30.35,"LEMBNeedleRetrieval":41.5,"LEMBPasskeyRetrieval":67.25,"LEMBQMSumRetrieval":35.6,"LEMBSummScreenFDRetrieval":95.23,"LEMBWikimQARetrieval":69.19} +{"Rank":7,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":54.81,"LEMBNarrativeQARetrieval":41.23,"LEMBNeedleRetrieval":39.5,"LEMBPasskeyRetrieval":44.75,"LEMBQMSumRetrieval":36.65,"LEMBSummScreenFDRetrieval":92.97,"LEMBWikimQARetrieval":73.75} +{"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.3,"LEMBNarrativeQARetrieval":44.09,"LEMBNeedleRetrieval":29.25,"LEMBPasskeyRetrieval":63.0,"LEMBQMSumRetrieval":32.49,"LEMBSummScreenFDRetrieval":84.8,"LEMBWikimQARetrieval":54.16} +{"Rank":9,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, 
fp32)":26.97,"Average":47.1,"LEMBNarrativeQARetrieval":41.46,"LEMBNeedleRetrieval":33.25,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":30.32,"LEMBSummScreenFDRetrieval":78.49,"LEMBWikimQARetrieval":60.8} +{"Rank":10,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":40.54,"LEMBNarrativeQARetrieval":23.6,"LEMBNeedleRetrieval":32.0,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":25.16,"LEMBSummScreenFDRetrieval":68.21,"LEMBWikimQARetrieval":56.04} +{"Rank":11,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":40.44,"LEMBNarrativeQARetrieval":24.22,"LEMBNeedleRetrieval":28.0,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":24.26,"LEMBSummScreenFDRetrieval":71.12,"LEMBWikimQARetrieval":56.8} +{"Rank":12,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":40.24,"LEMBNarrativeQARetrieval":25.31,"LEMBNeedleRetrieval":28.5,"LEMBPasskeyRetrieval":33.25,"LEMBQMSumRetrieval":23.83,"LEMBSummScreenFDRetrieval":74.67,"LEMBWikimQARetrieval":55.85} +{"Rank":13,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":38.83,"LEMBNarrativeQARetrieval":22.6,"LEMBNeedleRetrieval":30.75,"LEMBPasskeyRetrieval":38.25,"LEMBQMSumRetrieval":21.51,"LEMBSummScreenFDRetrieval":62.75,"LEMBWikimQARetrieval":57.13} +{"Rank":14,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":31.12,"LEMBNarrativeQARetrieval":19.34,"LEMBNeedleRetrieval":16.0,"LEMBPasskeyRetrieval":24.5,"LEMBQMSumRetrieval":21.54,"LEMBSummScreenFDRetrieval":60.43,"LEMBWikimQARetrieval":44.92} +{"Rank":15,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":29.81,"LEMBNarrativeQARetrieval":18.27,"LEMBNeedleRetrieval":20.0,"LEMBPasskeyRetrieval":23.25,"LEMBQMSumRetrieval":16.32,"LEMBSummScreenFDRetrieval":54.8,"LEMBWikimQARetrieval":46.23} +{"Rank":16,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.26,"LEMBNarrativeQARetrieval":19.64,"LEMBNeedleRetrieval":12.25,"LEMBPasskeyRetrieval":14.75,"LEMBQMSumRetrieval":13.08,"LEMBSummScreenFDRetrieval":46.98,"LEMBWikimQARetrieval":44.88} +{"Rank":17,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":21.98,"LEMBNarrativeQARetrieval":11.45,"LEMBNeedleRetrieval":17.5,"LEMBPasskeyRetrieval":20.25,"LEMBQMSumRetrieval":14.07,"LEMBSummScreenFDRetrieval":40.52,"LEMBWikimQARetrieval":28.1} +{"Rank":18,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":21.67,"LEMBNarrativeQARetrieval":16.02,"LEMBNeedleRetrieval":14.0,"LEMBPasskeyRetrieval":7.75,"LEMBQMSumRetrieval":12.23,"LEMBSummScreenFDRetrieval":41.15,"LEMBWikimQARetrieval":38.86} +{"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.92,"LEMBNarrativeQARetrieval":13.82,"LEMBNeedleRetrieval":13.5,"LEMBPasskeyRetrieval":8.25,"LEMBQMSumRetrieval":11.02,"LEMBSummScreenFDRetrieval":38.12,"LEMBWikimQARetrieval":40.84} diff --git a/boards_data/no/data_tasks/Classification/default.jsonl b/boards_data/no/data_tasks/Classification/default.jsonl index a6fb06469994e12257dfe9ba3013bdee3b121ceb..d2efe60de93c56eeca8f8b7fc5b011495104b438 100644 --- 
a/boards_data/no/data_tasks/Classification/default.jsonl +++ b/boards_data/no/data_tasks/Classification/default.jsonl @@ -1,47 +1,32 @@ -{"level_0":0,"index":13,"Rank":1,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97} -{"level_0":1,"index":12,"Rank":2,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25} -{"level_0":2,"index":24,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} -{"level_0":3,"index":30,"Rank":4,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19} -{"level_0":4,"index":23,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} -{"level_0":5,"index":17,"Rank":6,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":61.34,"MassiveIntentClassification (nb)":57.57,"MassiveScenarioClassification (nb)":63.66,"NoRecClassification":50.46,"NordicLangClassification":75.98,"NorwegianParliament":57.66,"ScalaNbClassification":62.69} -{"level_0":6,"index":31,"Rank":7,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79} -{"level_0":7,"index":26,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} -{"level_0":8,"index":16,"Rank":9,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95} -{"level_0":9,"index":45,"Rank":10,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification 
(nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99} -{"level_0":10,"index":18,"Rank":11,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18} -{"level_0":11,"index":46,"Rank":12,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33} -{"level_0":12,"index":19,"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18} -{"level_0":13,"index":20,"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13} -{"level_0":14,"index":8,"Rank":15,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34} -{"level_0":15,"index":22,"Rank":16,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03} -{"level_0":16,"index":6,"Rank":17,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63} -{"level_0":17,"index":34,"Rank":18,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} -{"level_0":18,"index":29,"Rank":19,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28} -{"level_0":19,"index":7,"Rank":20,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, 
fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41} -{"level_0":20,"index":0,"Rank":21,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":21,"index":1,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""} -{"level_0":22,"index":2,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":23,"index":3,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification (nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":24,"index":4,"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":25,"index":5,"Rank":26,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":26,"index":9,"Rank":27,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":27,"index":10,"Rank":28,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":28,"index":11,"Rank":29,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":29,"index":14,"Rank":30,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification 
(nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":30,"index":15,"Rank":31,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":31,"index":21,"Rank":32,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":32,"index":25,"Rank":33,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":33,"index":27,"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":34,"index":28,"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":35,"index":32,"Rank":36,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":36,"index":33,"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":37,"index":35,"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":38,"index":36,"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":39,"index":37,"Rank":40,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification 
(nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":40,"index":38,"Rank":41,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":41,"index":39,"Rank":42,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":42,"index":40,"Rank":43,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":43,"index":41,"Rank":44,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":44,"index":42,"Rank":45,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":45,"index":43,"Rank":46,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":46,"index":44,"Rank":47,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":65.06,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NoRecClassification (nob-Latn)":58.43,"NordicLangClassification":82.29,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":80.15,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":62.42,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NoRecClassification (nob-Latn)":53.74,"NordicLangClassification":75.94,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":75.85,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":59.43,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NoRecClassification (nob-Latn)":50.08,"NordicLangClassification":75.15,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":72.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.04,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NoRecClassification (nob-Latn)":37.93,"NordicLangClassification":54.71,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.7,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} +{"Rank":5,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":6,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":52.05,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":63.6,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":7,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NoRecClassification (nob-Latn)":"","NordicLangClassification":62.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.56,"ScalaNbClassification":53.63} +{"Rank":8,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NoRecClassification (nob-Latn)":"","NordicLangClassification":44.53,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":52.44,"ScalaNbClassification":52.41} +{"Rank":9,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NoRecClassification (nob-Latn)":"","NordicLangClassification":51.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":55.74,"ScalaNbClassification":50.34} +{"Rank":10,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.69,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.41,"ScalaNbClassification":62.25} 
+{"Rank":11,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":85.27,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":62.58,"ScalaNbClassification":66.97} +{"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NoRecClassification (nob-Latn)":"","NordicLangClassification":77.68,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.78,"ScalaNbClassification":58.95} +{"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NoRecClassification (nob-Latn)":"","NordicLangClassification":59.34,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.42,"ScalaNbClassification":50.18} +{"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NoRecClassification (nob-Latn)":"","NordicLangClassification":58.3,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.26,"ScalaNbClassification":50.13} +{"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NoRecClassification (nob-Latn)":"","NordicLangClassification":53.47,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.57,"ScalaNbClassification":50.03} +{"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NoRecClassification (nob-Latn)":"","NordicLangClassification":57.82,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":53.25,"ScalaNbClassification":75.28} +{"Rank":17,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NoRecClassification (nob-Latn)":"","NordicLangClassification":82.67,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":59.33,"ScalaNbClassification":60.19} +{"Rank":18,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.25,"NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.85,"ScalaNbClassification":66.79} +{"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NoRecClassification (nob-Latn)":45.45,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":35.39,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NoRecClassification (nob-Latn)":37.73,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.17,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":38.34,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":50.15,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":22,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":25,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":26,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":46.7,"NordicLangClassification":"","NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":42.52,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":50.32,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":41.57,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":28,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":30,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":31,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NoRecClassification (nob-Latn)":"","NordicLangClassification":74.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.79,"ScalaNbClassification":59.99} +{"Rank":32,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NoRecClassification (nob-Latn)":"","NordicLangClassification":79.39,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.75,"ScalaNbClassification":58.33} diff --git a/boards_data/other-cls/data_tasks/Classification/default.jsonl b/boards_data/other-cls/data_tasks/Classification/default.jsonl index 6cc2266f3c966d3ef8d2b03ad0762f491bf275ba..d0d84bec7f740f26c57813b82231cb3076a39437 100644 --- a/boards_data/other-cls/data_tasks/Classification/default.jsonl +++ b/boards_data/other-cls/data_tasks/Classification/default.jsonl @@ -1,128 +1,56 @@ -{"level_0":0,"index":70,"Rank":1,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.36,"AmazonCounterfactualClassification (de)":66.71,"AmazonCounterfactualClassification (ja)":76.05,"AmazonReviewsClassification (de)":53.0,"AmazonReviewsClassification (es)":48.81,"AmazonReviewsClassification 
(fr)":48.51,"AmazonReviewsClassification (ja)":47.71,"AmazonReviewsClassification (zh)":44.66,"MTOPDomainClassification (de)":92.68,"MTOPDomainClassification (es)":92.27,"MTOPDomainClassification (fr)":89.97,"MTOPDomainClassification (hi)":90.03,"MTOPDomainClassification (th)":87.56,"MTOPIntentClassification (de)":79.37,"MTOPIntentClassification (es)":80.34,"MTOPIntentClassification (fr)":76.72,"MTOPIntentClassification (hi)":77.87,"MTOPIntentClassification (th)":77.74,"MassiveIntentClassification (af)":67.66,"MassiveIntentClassification (am)":60.71,"MassiveIntentClassification (ar)":63.61,"MassiveIntentClassification (az)":69.0,"MassiveIntentClassification (bn)":68.54,"MassiveIntentClassification (cy)":63.03,"MassiveIntentClassification (de)":72.47,"MassiveIntentClassification (el)":71.24,"MassiveIntentClassification (es)":72.82,"MassiveIntentClassification (fa)":74.25,"MassiveIntentClassification (fi)":72.29,"MassiveIntentClassification (fr)":73.32,"MassiveIntentClassification (he)":70.22,"MassiveIntentClassification (hi)":71.58,"MassiveIntentClassification (hu)":71.92,"MassiveIntentClassification (hy)":68.07,"MassiveIntentClassification (id)":72.62,"MassiveIntentClassification (is)":65.77,"MassiveIntentClassification (it)":73.45,"MassiveIntentClassification (ja)":74.69,"MassiveIntentClassification (jv)":63.04,"MassiveIntentClassification (ka)":58.91,"MassiveIntentClassification (km)":54.43,"MassiveIntentClassification (kn)":66.33,"MassiveIntentClassification (ko)":70.59,"MassiveIntentClassification (lv)":69.11,"MassiveIntentClassification (ml)":69.7,"MassiveIntentClassification (mn)":66.44,"MassiveIntentClassification (ms)":70.8,"MassiveIntentClassification (my)":64.79,"MassiveIntentClassification (nl)":74.43,"MassiveIntentClassification (pt)":73.63,"MassiveIntentClassification (ro)":71.89,"MassiveIntentClassification (ru)":74.16,"MassiveIntentClassification (sl)":69.96,"MassiveIntentClassification (sq)":69.5,"MassiveIntentClassification (sw)":63.01,"MassiveIntentClassification (ta)":66.91,"MassiveIntentClassification (te)":67.62,"MassiveIntentClassification (th)":69.51,"MassiveIntentClassification (tl)":69.31,"MassiveIntentClassification (tr)":72.24,"MassiveIntentClassification (ur)":67.5,"MassiveIntentClassification (vi)":71.29,"MassiveIntentClassification (zh-TW)":69.38,"MassiveScenarioClassification (af)":73.34,"MassiveScenarioClassification (am)":65.84,"MassiveScenarioClassification (ar)":69.76,"MassiveScenarioClassification (az)":72.02,"MassiveScenarioClassification (bn)":72.76,"MassiveScenarioClassification (cy)":68.02,"MassiveScenarioClassification (de)":77.68,"MassiveScenarioClassification (el)":76.13,"MassiveScenarioClassification (es)":76.97,"MassiveScenarioClassification (fa)":78.1,"MassiveScenarioClassification (fi)":75.21,"MassiveScenarioClassification (fr)":77.07,"MassiveScenarioClassification (he)":73.53,"MassiveScenarioClassification (hi)":75.75,"MassiveScenarioClassification (hu)":77.09,"MassiveScenarioClassification (hy)":71.08,"MassiveScenarioClassification (id)":77.1,"MassiveScenarioClassification (is)":71.26,"MassiveScenarioClassification (it)":77.08,"MassiveScenarioClassification (ja)":79.35,"MassiveScenarioClassification (jv)":68.42,"MassiveScenarioClassification (ka)":66.16,"MassiveScenarioClassification (km)":60.11,"MassiveScenarioClassification (kn)":71.25,"MassiveScenarioClassification (ko)":76.46,"MassiveScenarioClassification (lv)":73.25,"MassiveScenarioClassification (ml)":74.12,"MassiveScenarioClassification (mn)":70.02,"MassiveScenarioClassification 
(ms)":74.41,"MassiveScenarioClassification (my)":68.4,"MassiveScenarioClassification (nl)":78.52,"MassiveScenarioClassification (pt)":77.12,"MassiveScenarioClassification (ro)":75.26,"MassiveScenarioClassification (ru)":77.71,"MassiveScenarioClassification (sl)":74.84,"MassiveScenarioClassification (sq)":74.72,"MassiveScenarioClassification (sw)":67.92,"MassiveScenarioClassification (ta)":70.93,"MassiveScenarioClassification (te)":72.41,"MassiveScenarioClassification (th)":75.18,"MassiveScenarioClassification (tl)":72.86,"MassiveScenarioClassification (tr)":76.47,"MassiveScenarioClassification (ur)":71.89,"MassiveScenarioClassification (vi)":74.75,"MassiveScenarioClassification (zh-TW)":75.46} -{"level_0":1,"index":69,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":67.48,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.91,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":86.41,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification (mn)":61.71,"MassiveIntentClassification (ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification 
(az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification (my)":63.25,"MassiveScenarioClassification (nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81} -{"level_0":2,"index":67,"Rank":3,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":66.64,"AmazonCounterfactualClassification (de)":74.05,"AmazonCounterfactualClassification (ja)":77.22,"AmazonReviewsClassification (de)":53.26,"AmazonReviewsClassification (es)":50.33,"AmazonReviewsClassification (fr)":36.71,"AmazonReviewsClassification (ja)":48.69,"AmazonReviewsClassification (zh)":46.24,"MTOPDomainClassification (de)":92.98,"MTOPDomainClassification (es)":93.37,"MTOPDomainClassification (fr)":74.8,"MTOPDomainClassification (hi)":88.81,"MTOPDomainClassification (th)":85.52,"MTOPIntentClassification (de)":77.77,"MTOPIntentClassification (es)":79.94,"MTOPIntentClassification (fr)":53.97,"MTOPIntentClassification (hi)":72.91,"MTOPIntentClassification (th)":73.24,"MassiveIntentClassification (af)":66.48,"MassiveIntentClassification (am)":44.29,"MassiveIntentClassification (ar)":63.17,"MassiveIntentClassification (az)":64.23,"MassiveIntentClassification (bn)":64.94,"MassiveIntentClassification (cy)":55.48,"MassiveIntentClassification (de)":74.09,"MassiveIntentClassification (el)":68.31,"MassiveIntentClassification (es)":75.09,"MassiveIntentClassification (fa)":72.21,"MassiveIntentClassification (fi)":68.74,"MassiveIntentClassification (fr)":46.39,"MassiveIntentClassification (he)":66.22,"MassiveIntentClassification (hi)":69.45,"MassiveIntentClassification (hu)":69.41,"MassiveIntentClassification (hy)":56.92,"MassiveIntentClassification (id)":72.71,"MassiveIntentClassification (is)":59.91,"MassiveIntentClassification (it)":75.25,"MassiveIntentClassification (ja)":76.36,"MassiveIntentClassification (jv)":57.92,"MassiveIntentClassification (ka)":52.55,"MassiveIntentClassification 
(km)":46.45,"MassiveIntentClassification (kn)":53.96,"MassiveIntentClassification (ko)":74.21,"MassiveIntentClassification (lv)":59.23,"MassiveIntentClassification (ml)":51.45,"MassiveIntentClassification (mn)":51.38,"MassiveIntentClassification (ms)":69.85,"MassiveIntentClassification (my)":49.15,"MassiveIntentClassification (nl)":74.83,"MassiveIntentClassification (pt)":75.27,"MassiveIntentClassification (ro)":69.63,"MassiveIntentClassification (ru)":76.63,"MassiveIntentClassification (sl)":67.15,"MassiveIntentClassification (sq)":58.84,"MassiveIntentClassification (sw)":57.37,"MassiveIntentClassification (ta)":53.15,"MassiveIntentClassification (te)":51.51,"MassiveIntentClassification (th)":66.91,"MassiveIntentClassification (tl)":68.73,"MassiveIntentClassification (tr)":72.07,"MassiveIntentClassification (ur)":62.09,"MassiveIntentClassification (vi)":71.17,"MassiveIntentClassification (zh-TW)":71.14,"MassiveScenarioClassification (af)":73.37,"MassiveScenarioClassification (am)":47.21,"MassiveScenarioClassification (ar)":69.84,"MassiveScenarioClassification (az)":67.0,"MassiveScenarioClassification (bn)":68.05,"MassiveScenarioClassification (cy)":61.88,"MassiveScenarioClassification (de)":79.03,"MassiveScenarioClassification (el)":72.97,"MassiveScenarioClassification (es)":78.84,"MassiveScenarioClassification (fa)":76.74,"MassiveScenarioClassification (fi)":71.22,"MassiveScenarioClassification (fr)":53.86,"MassiveScenarioClassification (he)":69.64,"MassiveScenarioClassification (hi)":73.51,"MassiveScenarioClassification (hu)":74.06,"MassiveScenarioClassification (hy)":59.55,"MassiveScenarioClassification (id)":77.41,"MassiveScenarioClassification (is)":66.58,"MassiveScenarioClassification (it)":78.39,"MassiveScenarioClassification (ja)":79.62,"MassiveScenarioClassification (jv)":64.29,"MassiveScenarioClassification (ka)":57.52,"MassiveScenarioClassification (km)":52.42,"MassiveScenarioClassification (kn)":58.55,"MassiveScenarioClassification (ko)":78.89,"MassiveScenarioClassification (lv)":63.5,"MassiveScenarioClassification (ml)":54.03,"MassiveScenarioClassification (mn)":54.24,"MassiveScenarioClassification (ms)":75.53,"MassiveScenarioClassification (my)":52.19,"MassiveScenarioClassification (nl)":78.48,"MassiveScenarioClassification (pt)":77.96,"MassiveScenarioClassification (ro)":73.19,"MassiveScenarioClassification (ru)":80.52,"MassiveScenarioClassification (sl)":73.66,"MassiveScenarioClassification (sq)":64.03,"MassiveScenarioClassification (sw)":64.66,"MassiveScenarioClassification (ta)":57.76,"MassiveScenarioClassification (te)":57.27,"MassiveScenarioClassification (th)":72.46,"MassiveScenarioClassification (tl)":73.71,"MassiveScenarioClassification (tr)":75.04,"MassiveScenarioClassification (ur)":67.05,"MassiveScenarioClassification (vi)":75.52,"MassiveScenarioClassification (zh-TW)":76.87} -{"level_0":3,"index":68,"Rank":4,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.32,"AmazonCounterfactualClassification (de)":71.72,"AmazonCounterfactualClassification (ja)":73.33,"AmazonReviewsClassification (de)":41.83,"AmazonReviewsClassification (es)":40.53,"AmazonReviewsClassification (fr)":40.94,"AmazonReviewsClassification (ja)":37.44,"AmazonReviewsClassification (zh)":37.23,"MTOPDomainClassification (de)":89.63,"MTOPDomainClassification (es)":90.59,"MTOPDomainClassification (fr)":84.79,"MTOPDomainClassification (hi)":87.32,"MTOPDomainClassification (th)":86.24,"MTOPIntentClassification 
(de)":71.23,"MTOPIntentClassification (es)":71.27,"MTOPIntentClassification (fr)":55.51,"MTOPIntentClassification (hi)":69.24,"MTOPIntentClassification (th)":71.71,"MassiveIntentClassification (af)":59.98,"MassiveIntentClassification (am)":53.66,"MassiveIntentClassification (ar)":57.46,"MassiveIntentClassification (az)":62.59,"MassiveIntentClassification (bn)":61.13,"MassiveIntentClassification (cy)":50.06,"MassiveIntentClassification (de)":66.09,"MassiveIntentClassification (el)":64.68,"MassiveIntentClassification (es)":68.4,"MassiveIntentClassification (fa)":67.25,"MassiveIntentClassification (fi)":65.78,"MassiveIntentClassification (fr)":67.95,"MassiveIntentClassification (he)":62.05,"MassiveIntentClassification (hi)":64.95,"MassiveIntentClassification (hu)":64.97,"MassiveIntentClassification (hy)":60.08,"MassiveIntentClassification (id)":66.64,"MassiveIntentClassification (is)":56.39,"MassiveIntentClassification (it)":68.93,"MassiveIntentClassification (ja)":68.94,"MassiveIntentClassification (jv)":54.26,"MassiveIntentClassification (ka)":48.99,"MassiveIntentClassification (km)":44.69,"MassiveIntentClassification (kn)":59.19,"MassiveIntentClassification (ko)":66.34,"MassiveIntentClassification (lv)":60.34,"MassiveIntentClassification (ml)":63.09,"MassiveIntentClassification (mn)":58.76,"MassiveIntentClassification (ms)":62.48,"MassiveIntentClassification (my)":58.56,"MassiveIntentClassification (nl)":67.3,"MassiveIntentClassification (pt)":68.98,"MassiveIntentClassification (ro)":65.54,"MassiveIntentClassification (ru)":69.02,"MassiveIntentClassification (sl)":62.35,"MassiveIntentClassification (sq)":61.23,"MassiveIntentClassification (sw)":56.0,"MassiveIntentClassification (ta)":58.71,"MassiveIntentClassification (te)":59.72,"MassiveIntentClassification (th)":65.6,"MassiveIntentClassification (tl)":60.86,"MassiveIntentClassification (tr)":67.41,"MassiveIntentClassification (ur)":61.52,"MassiveIntentClassification (vi)":66.17,"MassiveIntentClassification (zh-TW)":64.65,"MassiveScenarioClassification (af)":65.09,"MassiveScenarioClassification (am)":58.52,"MassiveScenarioClassification (ar)":62.24,"MassiveScenarioClassification (az)":63.75,"MassiveScenarioClassification (bn)":65.0,"MassiveScenarioClassification (cy)":52.84,"MassiveScenarioClassification (de)":71.95,"MassiveScenarioClassification (el)":70.18,"MassiveScenarioClassification (es)":71.5,"MassiveScenarioClassification (fa)":70.25,"MassiveScenarioClassification (fi)":69.13,"MassiveScenarioClassification (fr)":71.89,"MassiveScenarioClassification (he)":67.44,"MassiveScenarioClassification (hi)":69.16,"MassiveScenarioClassification (hu)":70.75,"MassiveScenarioClassification (hy)":63.14,"MassiveScenarioClassification (id)":70.7,"MassiveScenarioClassification (is)":60.94,"MassiveScenarioClassification (it)":72.32,"MassiveScenarioClassification (ja)":74.65,"MassiveScenarioClassification (jv)":59.69,"MassiveScenarioClassification (ka)":54.37,"MassiveScenarioClassification (km)":48.31,"MassiveScenarioClassification (kn)":62.15,"MassiveScenarioClassification (ko)":72.45,"MassiveScenarioClassification (lv)":62.81,"MassiveScenarioClassification (ml)":68.04,"MassiveScenarioClassification (mn)":61.44,"MassiveScenarioClassification (ms)":66.9,"MassiveScenarioClassification (my)":61.64,"MassiveScenarioClassification (nl)":72.11,"MassiveScenarioClassification (pt)":70.83,"MassiveScenarioClassification (ro)":69.19,"MassiveScenarioClassification (ru)":72.99,"MassiveScenarioClassification (sl)":65.26,"MassiveScenarioClassification 
(sq)":66.49,"MassiveScenarioClassification (sw)":59.89,"MassiveScenarioClassification (ta)":62.38,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":71.61,"MassiveScenarioClassification (tl)":62.74,"MassiveScenarioClassification (tr)":71.67,"MassiveScenarioClassification (ur)":64.64,"MassiveScenarioClassification (vi)":70.01,"MassiveScenarioClassification (zh-TW)":70.69} -{"level_0":4,"index":55,"Rank":5,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.11,"AmazonCounterfactualClassification (de)":72.02,"AmazonCounterfactualClassification (ja)":71.79,"AmazonReviewsClassification (de)":34.61,"AmazonReviewsClassification (es)":35.17,"AmazonReviewsClassification (fr)":34.91,"AmazonReviewsClassification (ja)":31.84,"AmazonReviewsClassification (zh)":31.91,"MTOPDomainClassification (de)":89.54,"MTOPDomainClassification (es)":90.62,"MTOPDomainClassification (fr)":86.19,"MTOPDomainClassification (hi)":89.42,"MTOPDomainClassification (th)":85.9,"MTOPIntentClassification (de)":73.43,"MTOPIntentClassification (es)":73.84,"MTOPIntentClassification (fr)":66.75,"MTOPIntentClassification (hi)":69.14,"MTOPIntentClassification (th)":68.47,"MassiveIntentClassification (af)":58.15,"MassiveIntentClassification (am)":57.91,"MassiveIntentClassification (ar)":57.23,"MassiveIntentClassification (az)":57.17,"MassiveIntentClassification (bn)":62.24,"MassiveIntentClassification (cy)":51.19,"MassiveIntentClassification (de)":61.31,"MassiveIntentClassification (el)":64.21,"MassiveIntentClassification (es)":63.97,"MassiveIntentClassification (fa)":66.67,"MassiveIntentClassification (fi)":62.86,"MassiveIntentClassification (fr)":58.55,"MassiveIntentClassification (he)":63.45,"MassiveIntentClassification (hi)":61.32,"MassiveIntentClassification (hu)":62.91,"MassiveIntentClassification (hy)":60.03,"MassiveIntentClassification (id)":63.29,"MassiveIntentClassification (is)":56.02,"MassiveIntentClassification (it)":64.44,"MassiveIntentClassification (ja)":63.7,"MassiveIntentClassification (jv)":52.0,"MassiveIntentClassification (ka)":54.09,"MassiveIntentClassification (km)":43.34,"MassiveIntentClassification (kn)":57.87,"MassiveIntentClassification (ko)":62.69,"MassiveIntentClassification (lv)":56.24,"MassiveIntentClassification (ml)":62.81,"MassiveIntentClassification (mn)":58.49,"MassiveIntentClassification (ms)":61.56,"MassiveIntentClassification (my)":59.4,"MassiveIntentClassification (nl)":64.56,"MassiveIntentClassification (pt)":63.49,"MassiveIntentClassification (ro)":62.53,"MassiveIntentClassification (ru)":62.63,"MassiveIntentClassification (sl)":63.43,"MassiveIntentClassification (sq)":61.45,"MassiveIntentClassification (sw)":56.25,"MassiveIntentClassification (ta)":59.75,"MassiveIntentClassification (te)":59.61,"MassiveIntentClassification (th)":59.42,"MassiveIntentClassification (tl)":58.12,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":59.49,"MassiveIntentClassification (vi)":60.48,"MassiveIntentClassification (zh-TW)":56.73,"MassiveScenarioClassification (af)":64.06,"MassiveScenarioClassification (am)":63.24,"MassiveScenarioClassification (ar)":63.69,"MassiveScenarioClassification (az)":60.86,"MassiveScenarioClassification (bn)":67.17,"MassiveScenarioClassification (cy)":56.52,"MassiveScenarioClassification (de)":67.48,"MassiveScenarioClassification (el)":70.23,"MassiveScenarioClassification (es)":69.08,"MassiveScenarioClassification (fa)":72.1,"MassiveScenarioClassification 
(fi)":67.16,"MassiveScenarioClassification (fr)":63.02,"MassiveScenarioClassification (he)":68.83,"MassiveScenarioClassification (hi)":66.9,"MassiveScenarioClassification (hu)":69.33,"MassiveScenarioClassification (hy)":65.82,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":63.14,"MassiveScenarioClassification (it)":70.04,"MassiveScenarioClassification (ja)":70.68,"MassiveScenarioClassification (jv)":59.79,"MassiveScenarioClassification (ka)":61.03,"MassiveScenarioClassification (km)":49.05,"MassiveScenarioClassification (kn)":63.78,"MassiveScenarioClassification (ko)":69.6,"MassiveScenarioClassification (lv)":59.97,"MassiveScenarioClassification (ml)":69.2,"MassiveScenarioClassification (mn)":62.72,"MassiveScenarioClassification (ms)":67.87,"MassiveScenarioClassification (my)":64.98,"MassiveScenarioClassification (nl)":69.8,"MassiveScenarioClassification (pt)":67.5,"MassiveScenarioClassification (ro)":67.53,"MassiveScenarioClassification (ru)":67.96,"MassiveScenarioClassification (sl)":69.57,"MassiveScenarioClassification (sq)":68.48,"MassiveScenarioClassification (sw)":63.18,"MassiveScenarioClassification (ta)":64.85,"MassiveScenarioClassification (te)":65.39,"MassiveScenarioClassification (th)":67.99,"MassiveScenarioClassification (tl)":63.4,"MassiveScenarioClassification (tr)":65.77,"MassiveScenarioClassification (ur)":65.81,"MassiveScenarioClassification (vi)":66.52,"MassiveScenarioClassification (zh-TW)":63.3} -{"level_0":5,"index":71,"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.63,"AmazonCounterfactualClassification (de)":71.65,"AmazonCounterfactualClassification (ja)":64.19,"AmazonReviewsClassification (de)":40.25,"AmazonReviewsClassification (es)":40.39,"AmazonReviewsClassification (fr)":39.68,"AmazonReviewsClassification (ja)":37.68,"AmazonReviewsClassification (zh)":37.5,"MTOPDomainClassification (de)":87.47,"MTOPDomainClassification (es)":89.27,"MTOPDomainClassification (fr)":81.2,"MTOPDomainClassification (hi)":86.02,"MTOPDomainClassification (th)":85.35,"MTOPIntentClassification (de)":65.86,"MTOPIntentClassification (es)":67.97,"MTOPIntentClassification (fr)":46.01,"MTOPIntentClassification (hi)":66.3,"MTOPIntentClassification (th)":67.52,"MassiveIntentClassification (af)":57.07,"MassiveIntentClassification (am)":51.96,"MassiveIntentClassification (ar)":54.43,"MassiveIntentClassification (az)":59.8,"MassiveIntentClassification (bn)":59.38,"MassiveIntentClassification (cy)":46.56,"MassiveIntentClassification (de)":62.73,"MassiveIntentClassification (el)":61.6,"MassiveIntentClassification (es)":66.31,"MassiveIntentClassification (fa)":65.54,"MassiveIntentClassification (fi)":61.46,"MassiveIntentClassification (fr)":65.47,"MassiveIntentClassification (he)":58.05,"MassiveIntentClassification (hi)":64.07,"MassiveIntentClassification (hu)":60.95,"MassiveIntentClassification (hy)":57.4,"MassiveIntentClassification (id)":64.17,"MassiveIntentClassification (is)":52.26,"MassiveIntentClassification (it)":65.54,"MassiveIntentClassification (ja)":68.23,"MassiveIntentClassification (jv)":50.85,"MassiveIntentClassification (ka)":48.45,"MassiveIntentClassification (km)":42.83,"MassiveIntentClassification (kn)":57.51,"MassiveIntentClassification (ko)":63.79,"MassiveIntentClassification (lv)":54.99,"MassiveIntentClassification (ml)":61.9,"MassiveIntentClassification (mn)":57.1,"MassiveIntentClassification (ms)":58.99,"MassiveIntentClassification 
(my)":55.9,"MassiveIntentClassification (nl)":65.64,"MassiveIntentClassification (pt)":66.85,"MassiveIntentClassification (ro)":60.81,"MassiveIntentClassification (ru)":65.76,"MassiveIntentClassification (sl)":56.52,"MassiveIntentClassification (sq)":57.99,"MassiveIntentClassification (sw)":53.57,"MassiveIntentClassification (ta)":57.26,"MassiveIntentClassification (te)":57.83,"MassiveIntentClassification (th)":64.07,"MassiveIntentClassification (tl)":58.91,"MassiveIntentClassification (tr)":63.54,"MassiveIntentClassification (ur)":59.28,"MassiveIntentClassification (vi)":64.07,"MassiveIntentClassification (zh-TW)":62.54,"MassiveScenarioClassification (af)":63.04,"MassiveScenarioClassification (am)":56.84,"MassiveScenarioClassification (ar)":59.62,"MassiveScenarioClassification (az)":60.85,"MassiveScenarioClassification (bn)":62.77,"MassiveScenarioClassification (cy)":50.18,"MassiveScenarioClassification (de)":69.19,"MassiveScenarioClassification (el)":67.07,"MassiveScenarioClassification (es)":69.83,"MassiveScenarioClassification (fa)":68.71,"MassiveScenarioClassification (fi)":65.95,"MassiveScenarioClassification (fr)":68.76,"MassiveScenarioClassification (he)":63.81,"MassiveScenarioClassification (hi)":67.69,"MassiveScenarioClassification (hu)":66.47,"MassiveScenarioClassification (hy)":59.5,"MassiveScenarioClassification (id)":67.92,"MassiveScenarioClassification (is)":56.49,"MassiveScenarioClassification (it)":69.04,"MassiveScenarioClassification (ja)":73.89,"MassiveScenarioClassification (jv)":56.63,"MassiveScenarioClassification (ka)":52.24,"MassiveScenarioClassification (km)":46.62,"MassiveScenarioClassification (kn)":59.16,"MassiveScenarioClassification (ko)":69.85,"MassiveScenarioClassification (lv)":56.66,"MassiveScenarioClassification (ml)":66.54,"MassiveScenarioClassification (mn)":59.31,"MassiveScenarioClassification (ms)":64.88,"MassiveScenarioClassification (my)":58.86,"MassiveScenarioClassification (nl)":70.87,"MassiveScenarioClassification (pt)":68.18,"MassiveScenarioClassification (ro)":64.65,"MassiveScenarioClassification (ru)":69.48,"MassiveScenarioClassification (sl)":60.18,"MassiveScenarioClassification (sq)":62.86,"MassiveScenarioClassification (sw)":58.15,"MassiveScenarioClassification (ta)":59.44,"MassiveScenarioClassification (te)":60.85,"MassiveScenarioClassification (th)":70.66,"MassiveScenarioClassification (tl)":60.88,"MassiveScenarioClassification (tr)":68.05,"MassiveScenarioClassification (ur)":62.11,"MassiveScenarioClassification (vi)":67.44,"MassiveScenarioClassification (zh-TW)":68.32} -{"level_0":6,"index":96,"Rank":7,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.56,"AmazonCounterfactualClassification (de)":73.17,"AmazonCounterfactualClassification (ja)":76.42,"AmazonReviewsClassification (de)":39.92,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":38.52,"AmazonReviewsClassification (ja)":36.44,"AmazonReviewsClassification (zh)":36.45,"MTOPDomainClassification (de)":86.95,"MTOPDomainClassification (es)":84.07,"MTOPDomainClassification (fr)":84.14,"MTOPDomainClassification (hi)":85.11,"MTOPDomainClassification (th)":81.24,"MTOPIntentClassification (de)":63.42,"MTOPIntentClassification (es)":64.44,"MTOPIntentClassification (fr)":62.01,"MTOPIntentClassification (hi)":62.58,"MTOPIntentClassification (th)":64.61,"MassiveIntentClassification (af)":56.12,"MassiveIntentClassification (am)":55.71,"MassiveIntentClassification (ar)":50.86,"MassiveIntentClassification 
(az)":58.97,"MassiveIntentClassification (bn)":58.22,"MassiveIntentClassification (cy)":50.16,"MassiveIntentClassification (de)":56.21,"MassiveIntentClassification (el)":57.03,"MassiveIntentClassification (es)":58.32,"MassiveIntentClassification (fa)":62.33,"MassiveIntentClassification (fi)":60.12,"MassiveIntentClassification (fr)":60.47,"MassiveIntentClassification (he)":56.55,"MassiveIntentClassification (hi)":59.4,"MassiveIntentClassification (hu)":59.52,"MassiveIntentClassification (hy)":56.2,"MassiveIntentClassification (id)":61.12,"MassiveIntentClassification (is)":54.9,"MassiveIntentClassification (it)":59.83,"MassiveIntentClassification (ja)":63.11,"MassiveIntentClassification (jv)":50.98,"MassiveIntentClassification (ka)":48.35,"MassiveIntentClassification (km)":48.55,"MassiveIntentClassification (kn)":56.24,"MassiveIntentClassification (ko)":60.99,"MassiveIntentClassification (lv)":57.1,"MassiveIntentClassification (ml)":57.91,"MassiveIntentClassification (mn)":58.5,"MassiveIntentClassification (ms)":58.6,"MassiveIntentClassification (my)":57.35,"MassiveIntentClassification (nl)":59.37,"MassiveIntentClassification (pt)":60.16,"MassiveIntentClassification (ro)":57.92,"MassiveIntentClassification (ru)":60.67,"MassiveIntentClassification (sl)":59.37,"MassiveIntentClassification (sq)":58.03,"MassiveIntentClassification (sw)":51.62,"MassiveIntentClassification (ta)":55.04,"MassiveIntentClassification (te)":58.32,"MassiveIntentClassification (th)":56.58,"MassiveIntentClassification (tl)":55.28,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":56.7,"MassiveIntentClassification (vi)":56.67,"MassiveIntentClassification (zh-TW)":59.51,"MassiveScenarioClassification (af)":63.39,"MassiveScenarioClassification (am)":62.02,"MassiveScenarioClassification (ar)":57.72,"MassiveScenarioClassification (az)":63.48,"MassiveScenarioClassification (bn)":61.84,"MassiveScenarioClassification (cy)":56.13,"MassiveScenarioClassification (de)":62.39,"MassiveScenarioClassification (el)":64.58,"MassiveScenarioClassification (es)":63.61,"MassiveScenarioClassification (fa)":67.46,"MassiveScenarioClassification (fi)":64.58,"MassiveScenarioClassification (fr)":65.1,"MassiveScenarioClassification (he)":63.53,"MassiveScenarioClassification (hi)":64.4,"MassiveScenarioClassification (hu)":65.82,"MassiveScenarioClassification (hy)":61.25,"MassiveScenarioClassification (id)":65.84,"MassiveScenarioClassification (is)":61.94,"MassiveScenarioClassification (it)":64.09,"MassiveScenarioClassification (ja)":67.72,"MassiveScenarioClassification (jv)":58.29,"MassiveScenarioClassification (ka)":53.38,"MassiveScenarioClassification (km)":56.18,"MassiveScenarioClassification (kn)":61.74,"MassiveScenarioClassification (ko)":67.26,"MassiveScenarioClassification (lv)":61.87,"MassiveScenarioClassification (ml)":62.26,"MassiveScenarioClassification (mn)":62.6,"MassiveScenarioClassification (ms)":65.63,"MassiveScenarioClassification (my)":62.94,"MassiveScenarioClassification (nl)":65.16,"MassiveScenarioClassification (pt)":63.28,"MassiveScenarioClassification (ro)":62.41,"MassiveScenarioClassification (ru)":65.25,"MassiveScenarioClassification (sl)":64.25,"MassiveScenarioClassification (sq)":64.54,"MassiveScenarioClassification (sw)":58.36,"MassiveScenarioClassification (ta)":59.08,"MassiveScenarioClassification (te)":64.13,"MassiveScenarioClassification (th)":64.34,"MassiveScenarioClassification (tl)":60.23,"MassiveScenarioClassification (tr)":65.43,"MassiveScenarioClassification 
(ur)":61.52,"MassiveScenarioClassification (vi)":61.05,"MassiveScenarioClassification (zh-TW)":67.08} -{"level_0":7,"index":46,"Rank":8,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.73,"AmazonCounterfactualClassification (de)":70.94,"AmazonCounterfactualClassification (ja)":80.06,"AmazonReviewsClassification (de)":38.83,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":39.47,"AmazonReviewsClassification (ja)":35.9,"AmazonReviewsClassification (zh)":36.07,"MTOPDomainClassification (de)":86.91,"MTOPDomainClassification (es)":87.74,"MTOPDomainClassification (fr)":86.22,"MTOPDomainClassification (hi)":82.92,"MTOPDomainClassification (th)":69.9,"MTOPIntentClassification (de)":63.26,"MTOPIntentClassification (es)":65.06,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":59.08,"MTOPIntentClassification (th)":43.75,"MassiveIntentClassification (af)":47.6,"MassiveIntentClassification (am)":31.57,"MassiveIntentClassification (ar)":52.63,"MassiveIntentClassification (az)":50.09,"MassiveIntentClassification (bn)":46.54,"MassiveIntentClassification (cy)":44.26,"MassiveIntentClassification (de)":61.87,"MassiveIntentClassification (el)":47.02,"MassiveIntentClassification (es)":62.54,"MassiveIntentClassification (fa)":55.19,"MassiveIntentClassification (fi)":48.43,"MassiveIntentClassification (fr)":64.27,"MassiveIntentClassification (he)":57.62,"MassiveIntentClassification (hi)":57.54,"MassiveIntentClassification (hu)":45.67,"MassiveIntentClassification (hy)":39.2,"MassiveIntentClassification (id)":55.0,"MassiveIntentClassification (is)":43.14,"MassiveIntentClassification (it)":61.0,"MassiveIntentClassification (ja)":64.29,"MassiveIntentClassification (jv)":43.69,"MassiveIntentClassification (ka)":38.35,"MassiveIntentClassification (km)":34.22,"MassiveIntentClassification (kn)":51.79,"MassiveIntentClassification (ko)":59.59,"MassiveIntentClassification (lv)":46.54,"MassiveIntentClassification (ml)":54.47,"MassiveIntentClassification (mn)":40.68,"MassiveIntentClassification (ms)":51.24,"MassiveIntentClassification (my)":31.76,"MassiveIntentClassification (nl)":60.82,"MassiveIntentClassification (pt)":62.74,"MassiveIntentClassification (ro)":49.68,"MassiveIntentClassification (ru)":60.85,"MassiveIntentClassification (sl)":48.59,"MassiveIntentClassification (sq)":47.17,"MassiveIntentClassification (sw)":45.97,"MassiveIntentClassification (ta)":53.6,"MassiveIntentClassification (te)":53.45,"MassiveIntentClassification (th)":46.17,"MassiveIntentClassification (tl)":49.48,"MassiveIntentClassification (tr)":58.03,"MassiveIntentClassification (ur)":39.26,"MassiveIntentClassification (vi)":52.16,"MassiveIntentClassification (zh-TW)":58.21,"MassiveScenarioClassification (af)":58.07,"MassiveScenarioClassification (am)":38.21,"MassiveScenarioClassification (ar)":57.47,"MassiveScenarioClassification (az)":54.37,"MassiveScenarioClassification (bn)":52.72,"MassiveScenarioClassification (cy)":49.5,"MassiveScenarioClassification (de)":71.28,"MassiveScenarioClassification (el)":52.42,"MassiveScenarioClassification (es)":67.04,"MassiveScenarioClassification (fa)":60.17,"MassiveScenarioClassification (fi)":54.05,"MassiveScenarioClassification (fr)":69.76,"MassiveScenarioClassification (he)":62.85,"MassiveScenarioClassification (hi)":62.18,"MassiveScenarioClassification (hu)":53.52,"MassiveScenarioClassification (hy)":45.95,"MassiveScenarioClassification (id)":60.33,"MassiveScenarioClassification 
(is)":50.1,"MassiveScenarioClassification (it)":66.49,"MassiveScenarioClassification (ja)":68.36,"MassiveScenarioClassification (jv)":50.59,"MassiveScenarioClassification (ka)":42.76,"MassiveScenarioClassification (km)":40.65,"MassiveScenarioClassification (kn)":57.25,"MassiveScenarioClassification (ko)":63.84,"MassiveScenarioClassification (lv)":53.14,"MassiveScenarioClassification (ml)":58.84,"MassiveScenarioClassification (mn)":44.82,"MassiveScenarioClassification (ms)":58.9,"MassiveScenarioClassification (my)":38.52,"MassiveScenarioClassification (nl)":67.54,"MassiveScenarioClassification (pt)":65.7,"MassiveScenarioClassification (ro)":57.2,"MassiveScenarioClassification (ru)":65.42,"MassiveScenarioClassification (sl)":55.15,"MassiveScenarioClassification (sq)":55.68,"MassiveScenarioClassification (sw)":52.3,"MassiveScenarioClassification (ta)":56.19,"MassiveScenarioClassification (te)":58.02,"MassiveScenarioClassification (th)":52.56,"MassiveScenarioClassification (tl)":57.43,"MassiveScenarioClassification (tr)":61.55,"MassiveScenarioClassification (ur)":47.11,"MassiveScenarioClassification (vi)":56.83,"MassiveScenarioClassification (zh-TW)":64.02} -{"level_0":8,"index":49,"Rank":9,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.35,"AmazonCounterfactualClassification (de)":61.35,"AmazonCounterfactualClassification (ja)":58.23,"AmazonReviewsClassification (de)":29.7,"AmazonReviewsClassification (es)":35.97,"AmazonReviewsClassification (fr)":35.92,"AmazonReviewsClassification (ja)":27.64,"AmazonReviewsClassification (zh)":32.63,"MTOPDomainClassification (de)":82.05,"MTOPDomainClassification (es)":93.55,"MTOPDomainClassification (fr)":90.98,"MTOPDomainClassification (hi)":89.33,"MTOPDomainClassification (th)":60.49,"MTOPIntentClassification (de)":61.92,"MTOPIntentClassification (es)":74.49,"MTOPIntentClassification (fr)":69.12,"MTOPIntentClassification (hi)":64.85,"MTOPIntentClassification (th)":49.36,"MassiveIntentClassification (af)":47.85,"MassiveIntentClassification (am)":33.3,"MassiveIntentClassification (ar)":59.25,"MassiveIntentClassification (az)":45.24,"MassiveIntentClassification (bn)":61.59,"MassiveIntentClassification (cy)":44.92,"MassiveIntentClassification (de)":56.1,"MassiveIntentClassification (el)":46.13,"MassiveIntentClassification (es)":66.35,"MassiveIntentClassification (fa)":51.2,"MassiveIntentClassification (fi)":45.33,"MassiveIntentClassification (fr)":66.95,"MassiveIntentClassification (he)":43.18,"MassiveIntentClassification (hi)":63.54,"MassiveIntentClassification (hu)":44.73,"MassiveIntentClassification (hy)":38.13,"MassiveIntentClassification (id)":64.06,"MassiveIntentClassification (is)":44.35,"MassiveIntentClassification (it)":60.77,"MassiveIntentClassification (ja)":61.22,"MassiveIntentClassification (jv)":50.94,"MassiveIntentClassification (ka)":33.84,"MassiveIntentClassification (km)":37.34,"MassiveIntentClassification (kn)":53.54,"MassiveIntentClassification (ko)":53.36,"MassiveIntentClassification (lv)":46.5,"MassiveIntentClassification (ml)":58.27,"MassiveIntentClassification (mn)":40.28,"MassiveIntentClassification (ms)":59.65,"MassiveIntentClassification (my)":37.42,"MassiveIntentClassification (nl)":52.09,"MassiveIntentClassification (pt)":66.69,"MassiveIntentClassification (ro)":50.53,"MassiveIntentClassification (ru)":58.32,"MassiveIntentClassification (sl)":47.74,"MassiveIntentClassification (sq)":48.94,"MassiveIntentClassification (sw)":49.81,"MassiveIntentClassification 
(ta)":56.4,"MassiveIntentClassification (te)":54.71,"MassiveIntentClassification (th)":44.43,"MassiveIntentClassification (tl)":50.21,"MassiveIntentClassification (tr)":46.56,"MassiveIntentClassification (ur)":56.75,"MassiveIntentClassification (vi)":64.53,"MassiveIntentClassification (zh-TW)":62.89,"MassiveScenarioClassification (af)":51.47,"MassiveScenarioClassification (am)":34.87,"MassiveScenarioClassification (ar)":65.21,"MassiveScenarioClassification (az)":45.58,"MassiveScenarioClassification (bn)":67.3,"MassiveScenarioClassification (cy)":46.29,"MassiveScenarioClassification (de)":61.74,"MassiveScenarioClassification (el)":48.96,"MassiveScenarioClassification (es)":73.34,"MassiveScenarioClassification (fa)":53.17,"MassiveScenarioClassification (fi)":44.69,"MassiveScenarioClassification (fr)":72.91,"MassiveScenarioClassification (he)":43.1,"MassiveScenarioClassification (hi)":69.27,"MassiveScenarioClassification (hu)":45.16,"MassiveScenarioClassification (hy)":38.73,"MassiveScenarioClassification (id)":70.13,"MassiveScenarioClassification (is)":44.21,"MassiveScenarioClassification (it)":65.57,"MassiveScenarioClassification (ja)":65.76,"MassiveScenarioClassification (jv)":54.79,"MassiveScenarioClassification (ka)":32.99,"MassiveScenarioClassification (km)":39.34,"MassiveScenarioClassification (kn)":60.5,"MassiveScenarioClassification (ko)":55.69,"MassiveScenarioClassification (lv)":44.35,"MassiveScenarioClassification (ml)":65.53,"MassiveScenarioClassification (mn)":38.72,"MassiveScenarioClassification (ms)":64.99,"MassiveScenarioClassification (my)":36.84,"MassiveScenarioClassification (nl)":56.32,"MassiveScenarioClassification (pt)":71.46,"MassiveScenarioClassification (ro)":53.69,"MassiveScenarioClassification (ru)":61.6,"MassiveScenarioClassification (sl)":48.04,"MassiveScenarioClassification (sq)":50.06,"MassiveScenarioClassification (sw)":54.22,"MassiveScenarioClassification (ta)":62.77,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":45.18,"MassiveScenarioClassification (tl)":52.06,"MassiveScenarioClassification (tr)":47.21,"MassiveScenarioClassification (ur)":64.26,"MassiveScenarioClassification (vi)":70.61,"MassiveScenarioClassification (zh-TW)":70.3} -{"level_0":9,"index":113,"Rank":10,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.59,"AmazonCounterfactualClassification (de)":68.69,"AmazonCounterfactualClassification (ja)":61.61,"AmazonReviewsClassification (de)":33.39,"AmazonReviewsClassification (es)":34.82,"AmazonReviewsClassification (fr)":33.45,"AmazonReviewsClassification (ja)":30.05,"AmazonReviewsClassification (zh)":32.52,"MTOPDomainClassification (de)":78.59,"MTOPDomainClassification (es)":79.24,"MTOPDomainClassification (fr)":76.17,"MTOPDomainClassification (hi)":78.75,"MTOPDomainClassification (th)":77.67,"MTOPIntentClassification (de)":55.29,"MTOPIntentClassification (es)":58.68,"MTOPIntentClassification (fr)":53.26,"MTOPIntentClassification (hi)":59.62,"MTOPIntentClassification (th)":58.8,"MassiveIntentClassification (af)":45.42,"MassiveIntentClassification (am)":37.68,"MassiveIntentClassification (ar)":45.02,"MassiveIntentClassification (az)":48.71,"MassiveIntentClassification (bn)":43.79,"MassiveIntentClassification (cy)":28.76,"MassiveIntentClassification (de)":51.56,"MassiveIntentClassification (el)":56.47,"MassiveIntentClassification (es)":58.28,"MassiveIntentClassification (fa)":59.05,"MassiveIntentClassification (fi)":57.36,"MassiveIntentClassification 
(fr)":58.8,"MassiveIntentClassification (he)":51.18,"MassiveIntentClassification (hi)":57.06,"MassiveIntentClassification (hu)":58.36,"MassiveIntentClassification (hy)":52.11,"MassiveIntentClassification (id)":58.27,"MassiveIntentClassification (is)":35.81,"MassiveIntentClassification (it)":58.28,"MassiveIntentClassification (ja)":60.78,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":44.5,"MassiveIntentClassification (km)":40.99,"MassiveIntentClassification (kn)":46.96,"MassiveIntentClassification (ko)":54.73,"MassiveIntentClassification (lv)":54.87,"MassiveIntentClassification (ml)":47.89,"MassiveIntentClassification (mn)":52.23,"MassiveIntentClassification (ms)":54.28,"MassiveIntentClassification (my)":51.96,"MassiveIntentClassification (nl)":59.45,"MassiveIntentClassification (pt)":59.84,"MassiveIntentClassification (ro)":57.04,"MassiveIntentClassification (ru)":58.02,"MassiveIntentClassification (sl)":56.36,"MassiveIntentClassification (sq)":56.48,"MassiveIntentClassification (sw)":33.96,"MassiveIntentClassification (ta)":44.29,"MassiveIntentClassification (te)":47.14,"MassiveIntentClassification (th)":56.86,"MassiveIntentClassification (tl)":35.36,"MassiveIntentClassification (tr)":59.63,"MassiveIntentClassification (ur)":52.79,"MassiveIntentClassification (vi)":54.65,"MassiveIntentClassification (zh-TW)":57.47,"MassiveScenarioClassification (af)":50.86,"MassiveScenarioClassification (am)":41.18,"MassiveScenarioClassification (ar)":50.08,"MassiveScenarioClassification (az)":51.29,"MassiveScenarioClassification (bn)":46.53,"MassiveScenarioClassification (cy)":34.35,"MassiveScenarioClassification (de)":56.4,"MassiveScenarioClassification (el)":61.8,"MassiveScenarioClassification (es)":62.21,"MassiveScenarioClassification (fa)":62.44,"MassiveScenarioClassification (fi)":61.1,"MassiveScenarioClassification (fr)":63.39,"MassiveScenarioClassification (he)":56.29,"MassiveScenarioClassification (hi)":60.63,"MassiveScenarioClassification (hu)":63.29,"MassiveScenarioClassification (hy)":54.88,"MassiveScenarioClassification (id)":61.99,"MassiveScenarioClassification (is)":38.58,"MassiveScenarioClassification (it)":62.35,"MassiveScenarioClassification (ja)":65.17,"MassiveScenarioClassification (jv)":36.13,"MassiveScenarioClassification (ka)":50.27,"MassiveScenarioClassification (km)":44.24,"MassiveScenarioClassification (kn)":47.37,"MassiveScenarioClassification (ko)":58.89,"MassiveScenarioClassification (lv)":56.51,"MassiveScenarioClassification (ml)":50.06,"MassiveScenarioClassification (mn)":55.05,"MassiveScenarioClassification (ms)":59.77,"MassiveScenarioClassification (my)":55.72,"MassiveScenarioClassification (nl)":63.38,"MassiveScenarioClassification (pt)":62.41,"MassiveScenarioClassification (ro)":60.68,"MassiveScenarioClassification (ru)":62.31,"MassiveScenarioClassification (sl)":61.43,"MassiveScenarioClassification (sq)":62.23,"MassiveScenarioClassification (sw)":38.52,"MassiveScenarioClassification (ta)":47.0,"MassiveScenarioClassification (te)":51.02,"MassiveScenarioClassification (th)":63.23,"MassiveScenarioClassification (tl)":38.72,"MassiveScenarioClassification (tr)":64.49,"MassiveScenarioClassification (ur)":56.8,"MassiveScenarioClassification (vi)":57.06,"MassiveScenarioClassification (zh-TW)":63.37} -{"level_0":10,"index":72,"Rank":11,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.6,"AmazonCounterfactualClassification (de)":66.03,"AmazonCounterfactualClassification 
(ja)":58.77,"AmazonReviewsClassification (de)":30.45,"AmazonReviewsClassification (es)":40.8,"AmazonReviewsClassification (fr)":35.12,"AmazonReviewsClassification (ja)":32.07,"AmazonReviewsClassification (zh)":38.12,"MTOPDomainClassification (de)":74.64,"MTOPDomainClassification (es)":92.07,"MTOPDomainClassification (fr)":69.24,"MTOPDomainClassification (hi)":88.54,"MTOPDomainClassification (th)":55.63,"MTOPIntentClassification (de)":53.4,"MTOPIntentClassification (es)":71.33,"MTOPIntentClassification (fr)":51.25,"MTOPIntentClassification (hi)":66.73,"MTOPIntentClassification (th)":42.13,"MassiveIntentClassification (af)":44.98,"MassiveIntentClassification (am)":25.35,"MassiveIntentClassification (ar)":57.08,"MassiveIntentClassification (az)":39.11,"MassiveIntentClassification (bn)":61.37,"MassiveIntentClassification (cy)":42.97,"MassiveIntentClassification (de)":50.93,"MassiveIntentClassification (el)":40.09,"MassiveIntentClassification (es)":63.81,"MassiveIntentClassification (fa)":49.06,"MassiveIntentClassification (fi)":42.58,"MassiveIntentClassification (fr)":43.21,"MassiveIntentClassification (he)":37.15,"MassiveIntentClassification (hi)":62.89,"MassiveIntentClassification (hu)":41.62,"MassiveIntentClassification (hy)":32.98,"MassiveIntentClassification (id)":62.11,"MassiveIntentClassification (is)":41.04,"MassiveIntentClassification (it)":55.83,"MassiveIntentClassification (ja)":58.83,"MassiveIntentClassification (jv)":49.31,"MassiveIntentClassification (ka)":26.43,"MassiveIntentClassification (km)":28.77,"MassiveIntentClassification (kn)":52.33,"MassiveIntentClassification (ko)":46.93,"MassiveIntentClassification (lv)":44.26,"MassiveIntentClassification (ml)":57.75,"MassiveIntentClassification (mn)":33.31,"MassiveIntentClassification (ms)":55.7,"MassiveIntentClassification (my)":27.39,"MassiveIntentClassification (nl)":48.34,"MassiveIntentClassification (pt)":64.74,"MassiveIntentClassification (ro)":48.41,"MassiveIntentClassification (ru)":52.99,"MassiveIntentClassification (sl)":44.77,"MassiveIntentClassification (sq)":45.45,"MassiveIntentClassification (sw)":46.46,"MassiveIntentClassification (ta)":55.46,"MassiveIntentClassification (te)":51.41,"MassiveIntentClassification (th)":39.2,"MassiveIntentClassification (tl)":48.53,"MassiveIntentClassification (tr)":39.51,"MassiveIntentClassification (ur)":54.72,"MassiveIntentClassification (vi)":62.01,"MassiveIntentClassification (zh-TW)":62.56,"MassiveScenarioClassification (af)":50.47,"MassiveScenarioClassification (am)":27.22,"MassiveScenarioClassification (ar)":65.43,"MassiveScenarioClassification (az)":40.74,"MassiveScenarioClassification (bn)":67.65,"MassiveScenarioClassification (cy)":43.94,"MassiveScenarioClassification (de)":56.67,"MassiveScenarioClassification (el)":41.81,"MassiveScenarioClassification (es)":71.78,"MassiveScenarioClassification (fa)":49.96,"MassiveScenarioClassification (fi)":41.01,"MassiveScenarioClassification (fr)":49.78,"MassiveScenarioClassification (he)":36.69,"MassiveScenarioClassification (hi)":69.28,"MassiveScenarioClassification (hu)":44.31,"MassiveScenarioClassification (hy)":33.64,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":42.1,"MassiveScenarioClassification (it)":60.27,"MassiveScenarioClassification (ja)":62.48,"MassiveScenarioClassification (jv)":54.68,"MassiveScenarioClassification (ka)":27.22,"MassiveScenarioClassification (km)":32.14,"MassiveScenarioClassification (kn)":57.95,"MassiveScenarioClassification (ko)":47.95,"MassiveScenarioClassification 
(lv)":42.76,"MassiveScenarioClassification (ml)":62.84,"MassiveScenarioClassification (mn)":33.21,"MassiveScenarioClassification (ms)":62.57,"MassiveScenarioClassification (my)":28.84,"MassiveScenarioClassification (nl)":52.85,"MassiveScenarioClassification (pt)":70.24,"MassiveScenarioClassification (ro)":52.73,"MassiveScenarioClassification (ru)":54.26,"MassiveScenarioClassification (sl)":46.89,"MassiveScenarioClassification (sq)":47.16,"MassiveScenarioClassification (sw)":51.2,"MassiveScenarioClassification (ta)":61.84,"MassiveScenarioClassification (te)":59.79,"MassiveScenarioClassification (th)":41.62,"MassiveScenarioClassification (tl)":50.47,"MassiveScenarioClassification (tr)":43.41,"MassiveScenarioClassification (ur)":60.15,"MassiveScenarioClassification (vi)":68.99,"MassiveScenarioClassification (zh-TW)":71.7} -{"level_0":11,"index":73,"Rank":12,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.37,"AmazonCounterfactualClassification (de)":66.42,"AmazonCounterfactualClassification (ja)":56.86,"AmazonReviewsClassification (de)":26.85,"AmazonReviewsClassification (es)":38.97,"AmazonReviewsClassification (fr)":26.85,"AmazonReviewsClassification (ja)":28.31,"AmazonReviewsClassification (zh)":35.7,"MTOPDomainClassification (de)":68.42,"MTOPDomainClassification (es)":88.21,"MTOPDomainClassification (fr)":34.99,"MTOPDomainClassification (hi)":84.23,"MTOPDomainClassification (th)":53.17,"MTOPIntentClassification (de)":49.17,"MTOPIntentClassification (es)":65.72,"MTOPIntentClassification (fr)":15.76,"MTOPIntentClassification (hi)":61.88,"MTOPIntentClassification (th)":41.67,"MassiveIntentClassification (af)":43.29,"MassiveIntentClassification (am)":23.21,"MassiveIntentClassification (ar)":53.38,"MassiveIntentClassification (az)":39.56,"MassiveIntentClassification (bn)":56.74,"MassiveIntentClassification (cy)":40.0,"MassiveIntentClassification (de)":45.82,"MassiveIntentClassification (el)":37.87,"MassiveIntentClassification (es)":61.17,"MassiveIntentClassification (fa)":45.65,"MassiveIntentClassification (fi)":40.28,"MassiveIntentClassification (fr)":15.09,"MassiveIntentClassification (he)":32.23,"MassiveIntentClassification (hi)":59.46,"MassiveIntentClassification (hu)":40.91,"MassiveIntentClassification (hy)":29.94,"MassiveIntentClassification (id)":59.14,"MassiveIntentClassification (is)":39.62,"MassiveIntentClassification (it)":51.77,"MassiveIntentClassification (ja)":53.75,"MassiveIntentClassification (jv)":46.29,"MassiveIntentClassification (ka)":25.11,"MassiveIntentClassification (km)":27.22,"MassiveIntentClassification (kn)":47.97,"MassiveIntentClassification (ko)":40.54,"MassiveIntentClassification (lv)":43.14,"MassiveIntentClassification (ml)":53.69,"MassiveIntentClassification (mn)":33.37,"MassiveIntentClassification (ms)":51.94,"MassiveIntentClassification (my)":25.32,"MassiveIntentClassification (nl)":44.03,"MassiveIntentClassification (pt)":61.74,"MassiveIntentClassification (ro)":45.73,"MassiveIntentClassification (ru)":47.61,"MassiveIntentClassification (sl)":42.83,"MassiveIntentClassification (sq)":43.61,"MassiveIntentClassification (sw)":45.55,"MassiveIntentClassification (ta)":51.24,"MassiveIntentClassification (te)":47.43,"MassiveIntentClassification (th)":36.88,"MassiveIntentClassification (tl)":45.93,"MassiveIntentClassification (tr)":38.59,"MassiveIntentClassification (ur)":51.85,"MassiveIntentClassification (vi)":58.72,"MassiveIntentClassification (zh-TW)":59.95,"MassiveScenarioClassification 
(af)":47.42,"MassiveScenarioClassification (am)":24.71,"MassiveScenarioClassification (ar)":62.09,"MassiveScenarioClassification (az)":39.25,"MassiveScenarioClassification (bn)":63.37,"MassiveScenarioClassification (cy)":39.17,"MassiveScenarioClassification (de)":50.71,"MassiveScenarioClassification (el)":39.47,"MassiveScenarioClassification (es)":68.31,"MassiveScenarioClassification (fa)":45.65,"MassiveScenarioClassification (fi)":38.95,"MassiveScenarioClassification (fr)":21.67,"MassiveScenarioClassification (he)":32.13,"MassiveScenarioClassification (hi)":65.57,"MassiveScenarioClassification (hu)":42.97,"MassiveScenarioClassification (hy)":32.13,"MassiveScenarioClassification (id)":65.11,"MassiveScenarioClassification (is)":40.84,"MassiveScenarioClassification (it)":54.55,"MassiveScenarioClassification (ja)":57.15,"MassiveScenarioClassification (jv)":49.3,"MassiveScenarioClassification (ka)":25.86,"MassiveScenarioClassification (km)":31.18,"MassiveScenarioClassification (kn)":53.01,"MassiveScenarioClassification (ko)":40.25,"MassiveScenarioClassification (lv)":41.88,"MassiveScenarioClassification (ml)":59.08,"MassiveScenarioClassification (mn)":33.34,"MassiveScenarioClassification (ms)":57.45,"MassiveScenarioClassification (my)":27.2,"MassiveScenarioClassification (nl)":48.42,"MassiveScenarioClassification (pt)":66.41,"MassiveScenarioClassification (ro)":50.08,"MassiveScenarioClassification (ru)":49.94,"MassiveScenarioClassification (sl)":43.43,"MassiveScenarioClassification (sq)":44.08,"MassiveScenarioClassification (sw)":49.53,"MassiveScenarioClassification (ta)":56.79,"MassiveScenarioClassification (te)":54.01,"MassiveScenarioClassification (th)":38.58,"MassiveScenarioClassification (tl)":48.07,"MassiveScenarioClassification (tr)":40.65,"MassiveScenarioClassification (ur)":57.75,"MassiveScenarioClassification (vi)":65.83,"MassiveScenarioClassification (zh-TW)":69.64} -{"level_0":12,"index":99,"Rank":13,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":44.78,"AmazonCounterfactualClassification (de)":68.14,"AmazonCounterfactualClassification (ja)":65.39,"AmazonReviewsClassification (de)":35.03,"AmazonReviewsClassification (es)":36.24,"AmazonReviewsClassification (fr)":35.7,"AmazonReviewsClassification (ja)":31.08,"AmazonReviewsClassification (zh)":33.89,"MTOPDomainClassification (de)":86.19,"MTOPDomainClassification (es)":87.75,"MTOPDomainClassification (fr)":84.61,"MTOPDomainClassification (hi)":76.41,"MTOPDomainClassification (th)":73.62,"MTOPIntentClassification (de)":59.21,"MTOPIntentClassification (es)":57.21,"MTOPIntentClassification (fr)":53.41,"MTOPIntentClassification (hi)":45.54,"MTOPIntentClassification (th)":47.73,"MassiveIntentClassification (af)":40.02,"MassiveIntentClassification (am)":2.35,"MassiveIntentClassification (ar)":43.14,"MassiveIntentClassification (az)":25.6,"MassiveIntentClassification (bn)":4.84,"MassiveIntentClassification (cy)":15.43,"MassiveIntentClassification (de)":51.57,"MassiveIntentClassification (el)":49.65,"MassiveIntentClassification (es)":56.57,"MassiveIntentClassification (fa)":55.36,"MassiveIntentClassification (fi)":45.72,"MassiveIntentClassification (fr)":57.02,"MassiveIntentClassification (he)":46.74,"MassiveIntentClassification (hi)":48.55,"MassiveIntentClassification (hu)":50.65,"MassiveIntentClassification (hy)":40.79,"MassiveIntentClassification (id)":56.0,"MassiveIntentClassification (is)":16.08,"MassiveIntentClassification 
(it)":57.65,"MassiveIntentClassification (ja)":55.33,"MassiveIntentClassification (jv)":28.16,"MassiveIntentClassification (ka)":29.41,"MassiveIntentClassification (km)":4.79,"MassiveIntentClassification (kn)":3.37,"MassiveIntentClassification (ko)":49.97,"MassiveIntentClassification (lv)":44.31,"MassiveIntentClassification (ml)":3.24,"MassiveIntentClassification (mn)":40.37,"MassiveIntentClassification (ms)":47.97,"MassiveIntentClassification (my)":38.48,"MassiveIntentClassification (nl)":58.29,"MassiveIntentClassification (pt)":58.63,"MassiveIntentClassification (ro)":50.63,"MassiveIntentClassification (ru)":57.96,"MassiveIntentClassification (sl)":50.66,"MassiveIntentClassification (sq)":50.25,"MassiveIntentClassification (sw)":19.29,"MassiveIntentClassification (ta)":3.79,"MassiveIntentClassification (te)":3.36,"MassiveIntentClassification (th)":45.28,"MassiveIntentClassification (tl)":28.44,"MassiveIntentClassification (tr)":50.47,"MassiveIntentClassification (ur)":46.03,"MassiveIntentClassification (vi)":45.25,"MassiveIntentClassification (zh-TW)":54.96,"MassiveScenarioClassification (af)":53.67,"MassiveScenarioClassification (am)":7.72,"MassiveScenarioClassification (ar)":52.19,"MassiveScenarioClassification (az)":34.75,"MassiveScenarioClassification (bn)":10.65,"MassiveScenarioClassification (cy)":21.24,"MassiveScenarioClassification (de)":61.4,"MassiveScenarioClassification (el)":60.68,"MassiveScenarioClassification (es)":64.61,"MassiveScenarioClassification (fa)":59.24,"MassiveScenarioClassification (fi)":54.66,"MassiveScenarioClassification (fr)":65.2,"MassiveScenarioClassification (he)":54.74,"MassiveScenarioClassification (hi)":55.99,"MassiveScenarioClassification (hu)":61.2,"MassiveScenarioClassification (hy)":49.63,"MassiveScenarioClassification (id)":65.25,"MassiveScenarioClassification (is)":22.6,"MassiveScenarioClassification (it)":64.63,"MassiveScenarioClassification (ja)":62.32,"MassiveScenarioClassification (jv)":35.77,"MassiveScenarioClassification (ka)":39.08,"MassiveScenarioClassification (km)":9.24,"MassiveScenarioClassification (kn)":8.28,"MassiveScenarioClassification (ko)":57.6,"MassiveScenarioClassification (lv)":51.72,"MassiveScenarioClassification (ml)":8.25,"MassiveScenarioClassification (mn)":47.21,"MassiveScenarioClassification (ms)":55.65,"MassiveScenarioClassification (my)":43.31,"MassiveScenarioClassification (nl)":67.49,"MassiveScenarioClassification (pt)":64.26,"MassiveScenarioClassification (ro)":58.03,"MassiveScenarioClassification (ru)":65.41,"MassiveScenarioClassification (sl)":59.36,"MassiveScenarioClassification (sq)":62.69,"MassiveScenarioClassification (sw)":25.12,"MassiveScenarioClassification (ta)":8.67,"MassiveScenarioClassification (te)":7.82,"MassiveScenarioClassification (th)":54.65,"MassiveScenarioClassification (tl)":36.09,"MassiveScenarioClassification (tr)":60.89,"MassiveScenarioClassification (ur)":54.71,"MassiveScenarioClassification (vi)":55.15,"MassiveScenarioClassification (zh-TW)":62.89} -{"level_0":13,"index":5,"Rank":14,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":42.45,"AmazonCounterfactualClassification (de)":67.82,"AmazonCounterfactualClassification (ja)":68.76,"AmazonReviewsClassification (de)":31.07,"AmazonReviewsClassification (es)":32.72,"AmazonReviewsClassification (fr)":31.12,"AmazonReviewsClassification (ja)":28.94,"AmazonReviewsClassification (zh)":30.89,"MTOPDomainClassification (de)":74.08,"MTOPDomainClassification (es)":73.47,"MTOPDomainClassification 
(fr)":72.26,"MTOPDomainClassification (hi)":72.95,"MTOPDomainClassification (th)":72.68,"MTOPIntentClassification (de)":51.62,"MTOPIntentClassification (es)":52.75,"MTOPIntentClassification (fr)":50.12,"MTOPIntentClassification (hi)":45.55,"MTOPIntentClassification (th)":50.07,"MassiveIntentClassification (af)":38.01,"MassiveIntentClassification (am)":12.7,"MassiveIntentClassification (ar)":37.16,"MassiveIntentClassification (az)":19.98,"MassiveIntentClassification (bn)":42.51,"MassiveIntentClassification (cy)":17.33,"MassiveIntentClassification (de)":44.79,"MassiveIntentClassification (el)":46.71,"MassiveIntentClassification (es)":45.44,"MassiveIntentClassification (fa)":45.01,"MassiveIntentClassification (fi)":45.94,"MassiveIntentClassification (fr)":46.13,"MassiveIntentClassification (he)":42.55,"MassiveIntentClassification (hi)":40.2,"MassiveIntentClassification (hu)":42.77,"MassiveIntentClassification (hy)":28.07,"MassiveIntentClassification (id)":45.81,"MassiveIntentClassification (is)":39.86,"MassiveIntentClassification (it)":48.25,"MassiveIntentClassification (ja)":45.3,"MassiveIntentClassification (jv)":24.3,"MassiveIntentClassification (ka)":22.7,"MassiveIntentClassification (km)":22.48,"MassiveIntentClassification (kn)":4.32,"MassiveIntentClassification (ko)":44.26,"MassiveIntentClassification (lv)":39.75,"MassiveIntentClassification (ml)":41.33,"MassiveIntentClassification (mn)":16.2,"MassiveIntentClassification (ms)":43.23,"MassiveIntentClassification (my)":25.37,"MassiveIntentClassification (nl)":45.0,"MassiveIntentClassification (pt)":48.55,"MassiveIntentClassification (ro)":44.3,"MassiveIntentClassification (ru)":44.29,"MassiveIntentClassification (sl)":44.72,"MassiveIntentClassification (sq)":46.12,"MassiveIntentClassification (sw)":31.89,"MassiveIntentClassification (ta)":29.63,"MassiveIntentClassification (te)":36.03,"MassiveIntentClassification (th)":43.39,"MassiveIntentClassification (tl)":29.73,"MassiveIntentClassification (tr)":43.93,"MassiveIntentClassification (ur)":26.11,"MassiveIntentClassification (vi)":44.33,"MassiveIntentClassification (zh-TW)":32.93,"MassiveScenarioClassification (af)":47.1,"MassiveScenarioClassification (am)":17.7,"MassiveScenarioClassification (ar)":45.21,"MassiveScenarioClassification (az)":28.21,"MassiveScenarioClassification (bn)":50.52,"MassiveScenarioClassification (cy)":22.58,"MassiveScenarioClassification (de)":54.34,"MassiveScenarioClassification (el)":55.47,"MassiveScenarioClassification (es)":52.77,"MassiveScenarioClassification (fa)":52.5,"MassiveScenarioClassification (fi)":52.63,"MassiveScenarioClassification (fr)":54.32,"MassiveScenarioClassification (he)":52.41,"MassiveScenarioClassification (hi)":47.37,"MassiveScenarioClassification (hu)":53.43,"MassiveScenarioClassification (hy)":33.57,"MassiveScenarioClassification (id)":54.38,"MassiveScenarioClassification (is)":49.78,"MassiveScenarioClassification (it)":54.84,"MassiveScenarioClassification (ja)":54.12,"MassiveScenarioClassification (jv)":32.71,"MassiveScenarioClassification (ka)":26.92,"MassiveScenarioClassification (km)":27.23,"MassiveScenarioClassification (kn)":10.06,"MassiveScenarioClassification (ko)":52.01,"MassiveScenarioClassification (lv)":44.82,"MassiveScenarioClassification (ml)":49.1,"MassiveScenarioClassification (mn)":21.51,"MassiveScenarioClassification (ms)":53.6,"MassiveScenarioClassification (my)":29.72,"MassiveScenarioClassification (nl)":53.33,"MassiveScenarioClassification (pt)":53.41,"MassiveScenarioClassification 
(ro)":50.48,"MassiveScenarioClassification (ru)":51.84,"MassiveScenarioClassification (sl)":51.29,"MassiveScenarioClassification (sq)":55.65,"MassiveScenarioClassification (sw)":42.04,"MassiveScenarioClassification (ta)":36.72,"MassiveScenarioClassification (te)":42.08,"MassiveScenarioClassification (th)":52.15,"MassiveScenarioClassification (tl)":37.34,"MassiveScenarioClassification (tr)":52.56,"MassiveScenarioClassification (ur)":32.6,"MassiveScenarioClassification (vi)":50.97,"MassiveScenarioClassification (zh-TW)":42.32} -{"level_0":14,"index":39,"Rank":15,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.64,"AmazonCounterfactualClassification (de)":56.88,"AmazonCounterfactualClassification (ja)":54.65,"AmazonReviewsClassification (de)":24.79,"AmazonReviewsClassification (es)":26.64,"AmazonReviewsClassification (fr)":26.39,"AmazonReviewsClassification (ja)":22.08,"AmazonReviewsClassification (zh)":24.27,"MTOPDomainClassification (de)":62.73,"MTOPDomainClassification (es)":67.55,"MTOPDomainClassification (fr)":65.35,"MTOPDomainClassification (hi)":45.37,"MTOPDomainClassification (th)":55.28,"MTOPIntentClassification (de)":49.56,"MTOPIntentClassification (es)":49.94,"MTOPIntentClassification (fr)":46.33,"MTOPIntentClassification (hi)":32.21,"MTOPIntentClassification (th)":43.63,"MassiveIntentClassification (af)":40.55,"MassiveIntentClassification (am)":24.18,"MassiveIntentClassification (ar)":30.13,"MassiveIntentClassification (az)":35.88,"MassiveIntentClassification (bn)":29.17,"MassiveIntentClassification (cy)":41.79,"MassiveIntentClassification (de)":42.07,"MassiveIntentClassification (el)":36.25,"MassiveIntentClassification (es)":42.68,"MassiveIntentClassification (fa)":35.59,"MassiveIntentClassification (fi)":40.04,"MassiveIntentClassification (fr)":43.44,"MassiveIntentClassification (he)":31.59,"MassiveIntentClassification (hi)":27.04,"MassiveIntentClassification (hu)":38.45,"MassiveIntentClassification (hy)":27.98,"MassiveIntentClassification (id)":43.97,"MassiveIntentClassification (is)":40.3,"MassiveIntentClassification (it)":45.47,"MassiveIntentClassification (ja)":45.61,"MassiveIntentClassification (jv)":38.67,"MassiveIntentClassification (ka)":25.65,"MassiveIntentClassification (km)":28.3,"MassiveIntentClassification (kn)":23.48,"MassiveIntentClassification (ko)":36.56,"MassiveIntentClassification (lv)":41.85,"MassiveIntentClassification (ml)":24.91,"MassiveIntentClassification (mn)":29.86,"MassiveIntentClassification (ms)":42.42,"MassiveIntentClassification (my)":25.13,"MassiveIntentClassification (nl)":43.62,"MassiveIntentClassification (pt)":45.21,"MassiveIntentClassification (ro)":41.81,"MassiveIntentClassification (ru)":35.97,"MassiveIntentClassification (sl)":40.61,"MassiveIntentClassification (sq)":42.76,"MassiveIntentClassification (sw)":41.12,"MassiveIntentClassification (ta)":24.6,"MassiveIntentClassification (te)":25.04,"MassiveIntentClassification (th)":35.4,"MassiveIntentClassification (tl)":41.19,"MassiveIntentClassification (tr)":36.41,"MassiveIntentClassification (ur)":25.93,"MassiveIntentClassification (vi)":38.8,"MassiveIntentClassification (zh-TW)":42.31,"MassiveScenarioClassification (af)":43.25,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":32.07,"MassiveScenarioClassification (az)":36.68,"MassiveScenarioClassification (bn)":29.57,"MassiveScenarioClassification (cy)":42.1,"MassiveScenarioClassification 
(de)":43.21,"MassiveScenarioClassification (el)":36.5,"MassiveScenarioClassification (es)":44.08,"MassiveScenarioClassification (fa)":32.61,"MassiveScenarioClassification (fi)":40.36,"MassiveScenarioClassification (fr)":45.07,"MassiveScenarioClassification (he)":32.18,"MassiveScenarioClassification (hi)":26.9,"MassiveScenarioClassification (hu)":40.38,"MassiveScenarioClassification (hy)":28.38,"MassiveScenarioClassification (id)":44.36,"MassiveScenarioClassification (is)":39.29,"MassiveScenarioClassification (it)":46.47,"MassiveScenarioClassification (ja)":46.26,"MassiveScenarioClassification (jv)":41.13,"MassiveScenarioClassification (ka)":24.73,"MassiveScenarioClassification (km)":29.74,"MassiveScenarioClassification (kn)":23.85,"MassiveScenarioClassification (ko)":36.57,"MassiveScenarioClassification (lv)":40.93,"MassiveScenarioClassification (ml)":25.53,"MassiveScenarioClassification (mn)":29.11,"MassiveScenarioClassification (ms)":43.79,"MassiveScenarioClassification (my)":27.27,"MassiveScenarioClassification (nl)":45.36,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":44.12,"MassiveScenarioClassification (ru)":32.76,"MassiveScenarioClassification (sl)":40.5,"MassiveScenarioClassification (sq)":42.52,"MassiveScenarioClassification (sw)":43.0,"MassiveScenarioClassification (ta)":28.33,"MassiveScenarioClassification (te)":26.59,"MassiveScenarioClassification (th)":36.79,"MassiveScenarioClassification (tl)":42.57,"MassiveScenarioClassification (tr)":37.09,"MassiveScenarioClassification (ur)":28.84,"MassiveScenarioClassification (vi)":37.36,"MassiveScenarioClassification (zh-TW)":44.42} -{"level_0":15,"index":40,"Rank":16,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.66,"AmazonCounterfactualClassification (de)":59.08,"AmazonCounterfactualClassification (ja)":56.42,"AmazonReviewsClassification (de)":24.52,"AmazonReviewsClassification (es)":29.1,"AmazonReviewsClassification (fr)":27.4,"AmazonReviewsClassification (ja)":21.72,"AmazonReviewsClassification (zh)":23.98,"MTOPDomainClassification (de)":60.37,"MTOPDomainClassification (es)":67.37,"MTOPDomainClassification (fr)":63.13,"MTOPDomainClassification (hi)":47.05,"MTOPDomainClassification (th)":52.28,"MTOPIntentClassification (de)":45.07,"MTOPIntentClassification (es)":48.81,"MTOPIntentClassification (fr)":44.34,"MTOPIntentClassification (hi)":34.2,"MTOPIntentClassification (th)":43.11,"MassiveIntentClassification (af)":37.79,"MassiveIntentClassification (am)":23.72,"MassiveIntentClassification (ar)":29.64,"MassiveIntentClassification (az)":39.48,"MassiveIntentClassification (bn)":26.55,"MassiveIntentClassification (cy)":38.78,"MassiveIntentClassification (de)":40.39,"MassiveIntentClassification (el)":37.29,"MassiveIntentClassification (es)":41.18,"MassiveIntentClassification (fa)":36.42,"MassiveIntentClassification (fi)":38.76,"MassiveIntentClassification (fr)":43.67,"MassiveIntentClassification (he)":31.98,"MassiveIntentClassification (hi)":28.04,"MassiveIntentClassification (hu)":38.14,"MassiveIntentClassification (hy)":26.05,"MassiveIntentClassification (id)":41.16,"MassiveIntentClassification (is)":38.63,"MassiveIntentClassification (it)":44.04,"MassiveIntentClassification (ja)":46.21,"MassiveIntentClassification (jv)":37.61,"MassiveIntentClassification (ka)":24.47,"MassiveIntentClassification (km)":26.24,"MassiveIntentClassification (kn)":17.83,"MassiveIntentClassification (ko)":37.27,"MassiveIntentClassification 
(lv)":40.93,"MassiveIntentClassification (ml)":17.89,"MassiveIntentClassification (mn)":32.98,"MassiveIntentClassification (ms)":40.91,"MassiveIntentClassification (my)":17.83,"MassiveIntentClassification (nl)":41.76,"MassiveIntentClassification (pt)":44.54,"MassiveIntentClassification (ro)":39.97,"MassiveIntentClassification (ru)":37.46,"MassiveIntentClassification (sl)":38.29,"MassiveIntentClassification (sq)":40.95,"MassiveIntentClassification (sw)":38.33,"MassiveIntentClassification (ta)":19.03,"MassiveIntentClassification (te)":19.38,"MassiveIntentClassification (th)":34.09,"MassiveIntentClassification (tl)":40.29,"MassiveIntentClassification (tr)":38.86,"MassiveIntentClassification (ur)":27.83,"MassiveIntentClassification (vi)":38.71,"MassiveIntentClassification (zh-TW)":42.32,"MassiveScenarioClassification (af)":40.25,"MassiveScenarioClassification (am)":25.69,"MassiveScenarioClassification (ar)":32.4,"MassiveScenarioClassification (az)":40.53,"MassiveScenarioClassification (bn)":27.23,"MassiveScenarioClassification (cy)":38.7,"MassiveScenarioClassification (de)":41.36,"MassiveScenarioClassification (el)":38.44,"MassiveScenarioClassification (es)":44.18,"MassiveScenarioClassification (fa)":34.83,"MassiveScenarioClassification (fi)":40.56,"MassiveScenarioClassification (fr)":45.92,"MassiveScenarioClassification (he)":32.08,"MassiveScenarioClassification (hi)":28.37,"MassiveScenarioClassification (hu)":39.49,"MassiveScenarioClassification (hy)":25.9,"MassiveScenarioClassification (id)":40.96,"MassiveScenarioClassification (is)":38.56,"MassiveScenarioClassification (it)":46.59,"MassiveScenarioClassification (ja)":46.25,"MassiveScenarioClassification (jv)":39.66,"MassiveScenarioClassification (ka)":25.28,"MassiveScenarioClassification (km)":28.97,"MassiveScenarioClassification (kn)":19.27,"MassiveScenarioClassification (ko)":35.73,"MassiveScenarioClassification (lv)":39.57,"MassiveScenarioClassification (ml)":19.9,"MassiveScenarioClassification (mn)":32.43,"MassiveScenarioClassification (ms)":42.32,"MassiveScenarioClassification (my)":20.86,"MassiveScenarioClassification (nl)":43.59,"MassiveScenarioClassification (pt)":46.31,"MassiveScenarioClassification (ro)":42.53,"MassiveScenarioClassification (ru)":35.95,"MassiveScenarioClassification (sl)":38.69,"MassiveScenarioClassification (sq)":40.47,"MassiveScenarioClassification (sw)":39.55,"MassiveScenarioClassification (ta)":22.88,"MassiveScenarioClassification (te)":20.51,"MassiveScenarioClassification (th)":34.93,"MassiveScenarioClassification (tl)":40.75,"MassiveScenarioClassification (tr)":39.07,"MassiveScenarioClassification (ur)":29.75,"MassiveScenarioClassification (vi)":38.02,"MassiveScenarioClassification (zh-TW)":45.18} -{"level_0":16,"index":97,"Rank":17,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":31.93,"AmazonCounterfactualClassification (de)":57.1,"AmazonCounterfactualClassification (ja)":59.91,"AmazonReviewsClassification (de)":25.91,"AmazonReviewsClassification (es)":27.63,"AmazonReviewsClassification (fr)":27.54,"AmazonReviewsClassification (ja)":23.57,"AmazonReviewsClassification (zh)":22.99,"MTOPDomainClassification (de)":72.04,"MTOPDomainClassification (es)":72.99,"MTOPDomainClassification (fr)":75.59,"MTOPDomainClassification (hi)":40.36,"MTOPDomainClassification (th)":17.1,"MTOPIntentClassification (de)":43.41,"MTOPIntentClassification (es)":41.88,"MTOPIntentClassification (fr)":38.94,"MTOPIntentClassification (hi)":17.75,"MTOPIntentClassification 
(th)":5.63,"MassiveIntentClassification (af)":38.94,"MassiveIntentClassification (am)":2.45,"MassiveIntentClassification (ar)":20.94,"MassiveIntentClassification (az)":34.25,"MassiveIntentClassification (bn)":13.67,"MassiveIntentClassification (cy)":35.71,"MassiveIntentClassification (de)":44.17,"MassiveIntentClassification (el)":28.7,"MassiveIntentClassification (es)":40.91,"MassiveIntentClassification (fa)":23.52,"MassiveIntentClassification (fi)":39.27,"MassiveIntentClassification (fr)":44.82,"MassiveIntentClassification (he)":23.65,"MassiveIntentClassification (hi)":17.98,"MassiveIntentClassification (hu)":38.0,"MassiveIntentClassification (hy)":8.69,"MassiveIntentClassification (id)":39.66,"MassiveIntentClassification (is)":35.14,"MassiveIntentClassification (it)":43.17,"MassiveIntentClassification (ja)":30.94,"MassiveIntentClassification (jv)":36.69,"MassiveIntentClassification (ka)":9.17,"MassiveIntentClassification (km)":4.99,"MassiveIntentClassification (kn)":3.08,"MassiveIntentClassification (ko)":19.97,"MassiveIntentClassification (lv)":38.61,"MassiveIntentClassification (ml)":2.85,"MassiveIntentClassification (mn)":23.25,"MassiveIntentClassification (ms)":36.21,"MassiveIntentClassification (my)":4.38,"MassiveIntentClassification (nl)":41.85,"MassiveIntentClassification (pt)":45.12,"MassiveIntentClassification (ro)":41.71,"MassiveIntentClassification (ru)":26.33,"MassiveIntentClassification (sl)":38.52,"MassiveIntentClassification (sq)":41.62,"MassiveIntentClassification (sw)":35.28,"MassiveIntentClassification (ta)":13.1,"MassiveIntentClassification (te)":2.56,"MassiveIntentClassification (th)":10.54,"MassiveIntentClassification (tl)":38.56,"MassiveIntentClassification (tr)":35.9,"MassiveIntentClassification (ur)":16.18,"MassiveIntentClassification (vi)":37.38,"MassiveIntentClassification (zh-TW)":22.39,"MassiveScenarioClassification (af)":45.71,"MassiveScenarioClassification (am)":7.41,"MassiveScenarioClassification (ar)":27.62,"MassiveScenarioClassification (az)":39.58,"MassiveScenarioClassification (bn)":18.98,"MassiveScenarioClassification (cy)":41.4,"MassiveScenarioClassification (de)":52.07,"MassiveScenarioClassification (el)":35.51,"MassiveScenarioClassification (es)":50.74,"MassiveScenarioClassification (fa)":29.0,"MassiveScenarioClassification (fi)":45.8,"MassiveScenarioClassification (fr)":53.76,"MassiveScenarioClassification (he)":25.68,"MassiveScenarioClassification (hi)":23.02,"MassiveScenarioClassification (hu)":44.09,"MassiveScenarioClassification (hy)":14.83,"MassiveScenarioClassification (id)":44.35,"MassiveScenarioClassification (is)":43.08,"MassiveScenarioClassification (it)":51.71,"MassiveScenarioClassification (ja)":36.75,"MassiveScenarioClassification (jv)":44.57,"MassiveScenarioClassification (ka)":14.84,"MassiveScenarioClassification (km)":9.75,"MassiveScenarioClassification (kn)":8.32,"MassiveScenarioClassification (ko)":25.72,"MassiveScenarioClassification (lv)":42.75,"MassiveScenarioClassification (ml)":7.25,"MassiveScenarioClassification (mn)":29.03,"MassiveScenarioClassification (ms)":44.65,"MassiveScenarioClassification (my)":10.07,"MassiveScenarioClassification (nl)":49.15,"MassiveScenarioClassification (pt)":53.0,"MassiveScenarioClassification (ro)":49.97,"MassiveScenarioClassification (ru)":28.75,"MassiveScenarioClassification (sl)":42.26,"MassiveScenarioClassification (sq)":49.14,"MassiveScenarioClassification (sw)":43.18,"MassiveScenarioClassification (ta)":19.38,"MassiveScenarioClassification (te)":7.74,"MassiveScenarioClassification 
(th)":18.32,"MassiveScenarioClassification (tl)":48.31,"MassiveScenarioClassification (tr)":41.79,"MassiveScenarioClassification (ur)":24.46,"MassiveScenarioClassification (vi)":40.94,"MassiveScenarioClassification (zh-TW)":31.16} -{"level_0":17,"index":34,"Rank":18,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.78,"AmazonCounterfactualClassification (de)":54.33,"AmazonCounterfactualClassification (ja)":56.34,"AmazonReviewsClassification (de)":27.2,"AmazonReviewsClassification (es)":34.88,"AmazonReviewsClassification (fr)":31.56,"AmazonReviewsClassification (ja)":22.71,"AmazonReviewsClassification (zh)":22.35,"MTOPDomainClassification (de)":74.86,"MTOPDomainClassification (es)":77.09,"MTOPDomainClassification (fr)":79.8,"MTOPDomainClassification (hi)":32.79,"MTOPDomainClassification (th)":16.65,"MTOPIntentClassification (de)":42.36,"MTOPIntentClassification (es)":44.73,"MTOPIntentClassification (fr)":38.96,"MTOPIntentClassification (hi)":13.58,"MTOPIntentClassification (th)":5.4,"MassiveIntentClassification (af)":37.22,"MassiveIntentClassification (am)":3.19,"MassiveIntentClassification (ar)":14.26,"MassiveIntentClassification (az)":37.22,"MassiveIntentClassification (bn)":10.76,"MassiveIntentClassification (cy)":32.5,"MassiveIntentClassification (de)":42.78,"MassiveIntentClassification (el)":33.49,"MassiveIntentClassification (es)":44.45,"MassiveIntentClassification (fa)":26.74,"MassiveIntentClassification (fi)":38.1,"MassiveIntentClassification (fr)":46.89,"MassiveIntentClassification (he)":25.2,"MassiveIntentClassification (hi)":13.94,"MassiveIntentClassification (hu)":34.71,"MassiveIntentClassification (hy)":6.71,"MassiveIntentClassification (id)":38.57,"MassiveIntentClassification (is)":32.23,"MassiveIntentClassification (it)":45.8,"MassiveIntentClassification (ja)":29.19,"MassiveIntentClassification (jv)":34.22,"MassiveIntentClassification (ka)":8.89,"MassiveIntentClassification (km)":4.62,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":15.03,"MassiveIntentClassification (lv)":36.1,"MassiveIntentClassification (ml)":3.0,"MassiveIntentClassification (mn)":23.3,"MassiveIntentClassification (ms)":36.13,"MassiveIntentClassification (my)":3.81,"MassiveIntentClassification (nl)":41.08,"MassiveIntentClassification (pt)":45.2,"MassiveIntentClassification (ro)":39.49,"MassiveIntentClassification (ru)":31.82,"MassiveIntentClassification (sl)":35.45,"MassiveIntentClassification (sq)":36.89,"MassiveIntentClassification (sw)":37.54,"MassiveIntentClassification (ta)":7.91,"MassiveIntentClassification (te)":2.85,"MassiveIntentClassification (th)":10.5,"MassiveIntentClassification (tl)":39.47,"MassiveIntentClassification (tr)":37.5,"MassiveIntentClassification (ur)":16.11,"MassiveIntentClassification (vi)":36.11,"MassiveIntentClassification (zh-TW)":17.22,"MassiveScenarioClassification (af)":47.8,"MassiveScenarioClassification (am)":7.08,"MassiveScenarioClassification (ar)":22.83,"MassiveScenarioClassification (az)":44.95,"MassiveScenarioClassification (bn)":16.59,"MassiveScenarioClassification (cy)":37.92,"MassiveScenarioClassification (de)":58.74,"MassiveScenarioClassification (el)":43.0,"MassiveScenarioClassification (es)":54.47,"MassiveScenarioClassification (fa)":30.58,"MassiveScenarioClassification (fi)":43.57,"MassiveScenarioClassification (fr)":56.99,"MassiveScenarioClassification (he)":28.08,"MassiveScenarioClassification (hi)":18.1,"MassiveScenarioClassification (hu)":41.74,"MassiveScenarioClassification 
(hy)":11.54,"MassiveScenarioClassification (id)":46.95,"MassiveScenarioClassification (is)":42.78,"MassiveScenarioClassification (it)":54.65,"MassiveScenarioClassification (ja)":35.9,"MassiveScenarioClassification (jv)":42.51,"MassiveScenarioClassification (ka)":13.8,"MassiveScenarioClassification (km)":9.45,"MassiveScenarioClassification (kn)":8.16,"MassiveScenarioClassification (ko)":19.91,"MassiveScenarioClassification (lv)":40.48,"MassiveScenarioClassification (ml)":6.7,"MassiveScenarioClassification (mn)":28.55,"MassiveScenarioClassification (ms)":46.62,"MassiveScenarioClassification (my)":9.98,"MassiveScenarioClassification (nl)":51.76,"MassiveScenarioClassification (pt)":55.6,"MassiveScenarioClassification (ro)":50.54,"MassiveScenarioClassification (ru)":37.73,"MassiveScenarioClassification (sl)":41.67,"MassiveScenarioClassification (sq)":47.38,"MassiveScenarioClassification (sw)":44.18,"MassiveScenarioClassification (ta)":12.6,"MassiveScenarioClassification (te)":7.02,"MassiveScenarioClassification (th)":19.79,"MassiveScenarioClassification (tl)":50.36,"MassiveScenarioClassification (tr)":45.48,"MassiveScenarioClassification (ur)":23.68,"MassiveScenarioClassification (vi)":41.63,"MassiveScenarioClassification (zh-TW)":27.52} -{"level_0":18,"index":101,"Rank":19,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":30.51,"AmazonCounterfactualClassification (de)":59.79,"AmazonCounterfactualClassification (ja)":50.59,"AmazonReviewsClassification (de)":35.06,"AmazonReviewsClassification (es)":37.18,"AmazonReviewsClassification (fr)":35.48,"AmazonReviewsClassification (ja)":22.24,"AmazonReviewsClassification (zh)":21.89,"MTOPDomainClassification (de)":85.42,"MTOPDomainClassification (es)":88.2,"MTOPDomainClassification (fr)":85.05,"MTOPDomainClassification (hi)":21.74,"MTOPDomainClassification (th)":15.87,"MTOPIntentClassification (de)":55.75,"MTOPIntentClassification (es)":57.73,"MTOPIntentClassification (fr)":51.07,"MTOPIntentClassification (hi)":3.19,"MTOPIntentClassification (th)":5.55,"MassiveIntentClassification (af)":42.6,"MassiveIntentClassification (am)":2.12,"MassiveIntentClassification (ar)":4.64,"MassiveIntentClassification (az)":35.05,"MassiveIntentClassification (bn)":2.84,"MassiveIntentClassification (cy)":36.19,"MassiveIntentClassification (de)":55.49,"MassiveIntentClassification (el)":10.14,"MassiveIntentClassification (es)":56.72,"MassiveIntentClassification (fa)":3.54,"MassiveIntentClassification (fi)":37.13,"MassiveIntentClassification (fr)":57.67,"MassiveIntentClassification (he)":2.56,"MassiveIntentClassification (hi)":3.24,"MassiveIntentClassification (hu)":34.22,"MassiveIntentClassification (hy)":3.01,"MassiveIntentClassification (id)":46.54,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":54.13,"MassiveIntentClassification (ja)":4.27,"MassiveIntentClassification (jv)":36.97,"MassiveIntentClassification (ka)":2.72,"MassiveIntentClassification (km)":5.35,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":2.64,"MassiveIntentClassification (lv)":36.32,"MassiveIntentClassification (ml)":3.18,"MassiveIntentClassification (mn)":22.85,"MassiveIntentClassification (ms)":42.87,"MassiveIntentClassification (my)":4.04,"MassiveIntentClassification (nl)":49.53,"MassiveIntentClassification (pt)":57.03,"MassiveIntentClassification (ro)":49.95,"MassiveIntentClassification (ru)":36.58,"MassiveIntentClassification (sl)":39.44,"MassiveIntentClassification 
(sq)":41.78,"MassiveIntentClassification (sw)":35.85,"MassiveIntentClassification (ta)":2.32,"MassiveIntentClassification (te)":2.2,"MassiveIntentClassification (th)":3.74,"MassiveIntentClassification (tl)":43.12,"MassiveIntentClassification (tr)":35.24,"MassiveIntentClassification (ur)":3.0,"MassiveIntentClassification (vi)":30.01,"MassiveIntentClassification (zh-TW)":3.35,"MassiveScenarioClassification (af)":52.54,"MassiveScenarioClassification (am)":6.3,"MassiveScenarioClassification (ar)":11.96,"MassiveScenarioClassification (az)":40.17,"MassiveScenarioClassification (bn)":8.29,"MassiveScenarioClassification (cy)":42.24,"MassiveScenarioClassification (de)":68.09,"MassiveScenarioClassification (el)":16.66,"MassiveScenarioClassification (es)":64.32,"MassiveScenarioClassification (fa)":6.9,"MassiveScenarioClassification (fi)":43.96,"MassiveScenarioClassification (fr)":66.72,"MassiveScenarioClassification (he)":7.51,"MassiveScenarioClassification (hi)":7.82,"MassiveScenarioClassification (hu)":42.16,"MassiveScenarioClassification (hy)":9.33,"MassiveScenarioClassification (id)":53.54,"MassiveScenarioClassification (is)":42.84,"MassiveScenarioClassification (it)":62.44,"MassiveScenarioClassification (ja)":7.29,"MassiveScenarioClassification (jv)":43.13,"MassiveScenarioClassification (ka)":7.63,"MassiveScenarioClassification (km)":9.08,"MassiveScenarioClassification (kn)":8.1,"MassiveScenarioClassification (ko)":6.35,"MassiveScenarioClassification (lv)":40.24,"MassiveScenarioClassification (ml)":7.65,"MassiveScenarioClassification (mn)":27.98,"MassiveScenarioClassification (ms)":52.41,"MassiveScenarioClassification (my)":9.21,"MassiveScenarioClassification (nl)":60.35,"MassiveScenarioClassification (pt)":62.78,"MassiveScenarioClassification (ro)":59.62,"MassiveScenarioClassification (ru)":43.44,"MassiveScenarioClassification (sl)":44.79,"MassiveScenarioClassification (sq)":50.84,"MassiveScenarioClassification (sw)":44.63,"MassiveScenarioClassification (ta)":7.95,"MassiveScenarioClassification (te)":7.5,"MassiveScenarioClassification (th)":8.79,"MassiveScenarioClassification (tl)":53.54,"MassiveScenarioClassification (tr)":42.47,"MassiveScenarioClassification (ur)":9.58,"MassiveScenarioClassification (vi)":34.68,"MassiveScenarioClassification (zh-TW)":8.77} -{"level_0":19,"index":107,"Rank":20,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":29.6,"AmazonCounterfactualClassification (de)":67.01,"AmazonCounterfactualClassification (ja)":45.61,"AmazonReviewsClassification (de)":44.05,"AmazonReviewsClassification (es)":45.01,"AmazonReviewsClassification (fr)":43.52,"AmazonReviewsClassification (ja)":22.23,"AmazonReviewsClassification (zh)":21.88,"MTOPDomainClassification (de)":83.28,"MTOPDomainClassification (es)":85.32,"MTOPDomainClassification (fr)":85.14,"MTOPDomainClassification (hi)":20.85,"MTOPDomainClassification (th)":15.62,"MTOPIntentClassification (de)":54.65,"MTOPIntentClassification (es)":57.38,"MTOPIntentClassification (fr)":54.39,"MTOPIntentClassification (hi)":3.28,"MTOPIntentClassification (th)":5.08,"MassiveIntentClassification (af)":40.17,"MassiveIntentClassification (am)":2.18,"MassiveIntentClassification (ar)":4.18,"MassiveIntentClassification (az)":30.02,"MassiveIntentClassification (bn)":2.6,"MassiveIntentClassification (cy)":29.15,"MassiveIntentClassification (de)":57.43,"MassiveIntentClassification (el)":9.96,"MassiveIntentClassification (es)":57.97,"MassiveIntentClassification 
(fa)":3.6,"MassiveIntentClassification (fi)":34.02,"MassiveIntentClassification (fr)":60.99,"MassiveIntentClassification (he)":2.51,"MassiveIntentClassification (hi)":3.02,"MassiveIntentClassification (hu)":31.66,"MassiveIntentClassification (hy)":3.32,"MassiveIntentClassification (id)":41.53,"MassiveIntentClassification (is)":30.25,"MassiveIntentClassification (it)":56.57,"MassiveIntentClassification (ja)":3.5,"MassiveIntentClassification (jv)":31.67,"MassiveIntentClassification (ka)":2.79,"MassiveIntentClassification (km)":5.43,"MassiveIntentClassification (kn)":2.79,"MassiveIntentClassification (ko)":2.67,"MassiveIntentClassification (lv)":34.25,"MassiveIntentClassification (ml)":2.98,"MassiveIntentClassification (mn)":20.99,"MassiveIntentClassification (ms)":37.43,"MassiveIntentClassification (my)":4.02,"MassiveIntentClassification (nl)":50.51,"MassiveIntentClassification (pt)":57.95,"MassiveIntentClassification (ro)":49.37,"MassiveIntentClassification (ru)":33.46,"MassiveIntentClassification (sl)":36.33,"MassiveIntentClassification (sq)":37.65,"MassiveIntentClassification (sw)":30.6,"MassiveIntentClassification (ta)":1.79,"MassiveIntentClassification (te)":2.26,"MassiveIntentClassification (th)":4.02,"MassiveIntentClassification (tl)":38.92,"MassiveIntentClassification (tr)":32.05,"MassiveIntentClassification (ur)":2.7,"MassiveIntentClassification (vi)":21.47,"MassiveIntentClassification (zh-TW)":3.24,"MassiveScenarioClassification (af)":50.81,"MassiveScenarioClassification (am)":6.95,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.79,"MassiveScenarioClassification (bn)":8.0,"MassiveScenarioClassification (cy)":33.91,"MassiveScenarioClassification (de)":65.33,"MassiveScenarioClassification (el)":16.89,"MassiveScenarioClassification (es)":62.52,"MassiveScenarioClassification (fa)":6.08,"MassiveScenarioClassification (fi)":43.34,"MassiveScenarioClassification (fr)":66.42,"MassiveScenarioClassification (he)":7.55,"MassiveScenarioClassification (hi)":7.44,"MassiveScenarioClassification (hu)":40.85,"MassiveScenarioClassification (hy)":9.25,"MassiveScenarioClassification (id)":51.92,"MassiveScenarioClassification (is)":40.09,"MassiveScenarioClassification (it)":62.94,"MassiveScenarioClassification (ja)":7.9,"MassiveScenarioClassification (jv)":41.33,"MassiveScenarioClassification (ka)":7.76,"MassiveScenarioClassification (km)":9.19,"MassiveScenarioClassification (kn)":8.36,"MassiveScenarioClassification (ko)":6.13,"MassiveScenarioClassification (lv)":40.7,"MassiveScenarioClassification (ml)":6.98,"MassiveScenarioClassification (mn)":27.0,"MassiveScenarioClassification (ms)":46.9,"MassiveScenarioClassification (my)":9.55,"MassiveScenarioClassification (nl)":59.65,"MassiveScenarioClassification (pt)":62.18,"MassiveScenarioClassification (ro)":58.22,"MassiveScenarioClassification (ru)":40.73,"MassiveScenarioClassification (sl)":43.66,"MassiveScenarioClassification (sq)":49.25,"MassiveScenarioClassification (sw)":40.55,"MassiveScenarioClassification (ta)":7.46,"MassiveScenarioClassification (te)":7.03,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":51.74,"MassiveScenarioClassification (tr)":43.01,"MassiveScenarioClassification (ur)":9.61,"MassiveScenarioClassification (vi)":28.91,"MassiveScenarioClassification (zh-TW)":7.14} -{"level_0":20,"index":100,"Rank":21,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.9,"AmazonCounterfactualClassification 
(de)":59.38,"AmazonCounterfactualClassification (ja)":45.87,"AmazonReviewsClassification (de)":33.06,"AmazonReviewsClassification (es)":34.0,"AmazonReviewsClassification (fr)":33.48,"AmazonReviewsClassification (ja)":21.78,"AmazonReviewsClassification (zh)":21.83,"MTOPDomainClassification (de)":81.91,"MTOPDomainClassification (es)":84.7,"MTOPDomainClassification (fr)":82.48,"MTOPDomainClassification (hi)":22.11,"MTOPDomainClassification (th)":16.36,"MTOPIntentClassification (de)":52.13,"MTOPIntentClassification (es)":52.62,"MTOPIntentClassification (fr)":46.39,"MTOPIntentClassification (hi)":3.9,"MTOPIntentClassification (th)":5.38,"MassiveIntentClassification (af)":41.02,"MassiveIntentClassification (am)":2.34,"MassiveIntentClassification (ar)":4.87,"MassiveIntentClassification (az)":34.92,"MassiveIntentClassification (bn)":2.52,"MassiveIntentClassification (cy)":35.87,"MassiveIntentClassification (de)":51.48,"MassiveIntentClassification (el)":10.0,"MassiveIntentClassification (es)":53.3,"MassiveIntentClassification (fa)":3.59,"MassiveIntentClassification (fi)":37.35,"MassiveIntentClassification (fr)":54.83,"MassiveIntentClassification (he)":2.52,"MassiveIntentClassification (hi)":2.88,"MassiveIntentClassification (hu)":33.52,"MassiveIntentClassification (hy)":3.13,"MassiveIntentClassification (id)":40.11,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":51.21,"MassiveIntentClassification (ja)":4.75,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.48,"MassiveIntentClassification (kn)":2.44,"MassiveIntentClassification (ko)":2.59,"MassiveIntentClassification (lv)":38.15,"MassiveIntentClassification (ml)":2.67,"MassiveIntentClassification (mn)":18.47,"MassiveIntentClassification (ms)":35.58,"MassiveIntentClassification (my)":4.35,"MassiveIntentClassification (nl)":45.96,"MassiveIntentClassification (pt)":52.27,"MassiveIntentClassification (ro)":46.39,"MassiveIntentClassification (ru)":16.82,"MassiveIntentClassification (sl)":37.3,"MassiveIntentClassification (sq)":41.73,"MassiveIntentClassification (sw)":35.97,"MassiveIntentClassification (ta)":1.52,"MassiveIntentClassification (te)":2.57,"MassiveIntentClassification (th)":3.94,"MassiveIntentClassification (tl)":41.03,"MassiveIntentClassification (tr)":33.75,"MassiveIntentClassification (ur)":2.57,"MassiveIntentClassification (vi)":25.23,"MassiveIntentClassification (zh-TW)":4.64,"MassiveScenarioClassification (af)":51.48,"MassiveScenarioClassification (am)":7.74,"MassiveScenarioClassification (ar)":12.03,"MassiveScenarioClassification (az)":41.77,"MassiveScenarioClassification (bn)":8.07,"MassiveScenarioClassification (cy)":43.67,"MassiveScenarioClassification (de)":63.63,"MassiveScenarioClassification (el)":16.83,"MassiveScenarioClassification (es)":61.48,"MassiveScenarioClassification (fa)":6.48,"MassiveScenarioClassification (fi)":43.54,"MassiveScenarioClassification (fr)":64.06,"MassiveScenarioClassification (he)":8.03,"MassiveScenarioClassification (hi)":7.5,"MassiveScenarioClassification (hu)":42.59,"MassiveScenarioClassification (hy)":9.22,"MassiveScenarioClassification (id)":48.67,"MassiveScenarioClassification (is)":43.87,"MassiveScenarioClassification (it)":59.83,"MassiveScenarioClassification (ja)":5.62,"MassiveScenarioClassification (jv)":42.18,"MassiveScenarioClassification (ka)":7.52,"MassiveScenarioClassification (km)":9.55,"MassiveScenarioClassification (kn)":8.34,"MassiveScenarioClassification 
(ko)":6.11,"MassiveScenarioClassification (lv)":43.35,"MassiveScenarioClassification (ml)":7.28,"MassiveScenarioClassification (mn)":23.94,"MassiveScenarioClassification (ms)":45.18,"MassiveScenarioClassification (my)":9.33,"MassiveScenarioClassification (nl)":57.02,"MassiveScenarioClassification (pt)":59.45,"MassiveScenarioClassification (ro)":56.8,"MassiveScenarioClassification (ru)":25.85,"MassiveScenarioClassification (sl)":42.51,"MassiveScenarioClassification (sq)":50.41,"MassiveScenarioClassification (sw)":43.02,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.9,"MassiveScenarioClassification (th)":8.7,"MassiveScenarioClassification (tl)":51.76,"MassiveScenarioClassification (tr)":42.54,"MassiveScenarioClassification (ur)":9.32,"MassiveScenarioClassification (vi)":31.51,"MassiveScenarioClassification (zh-TW)":8.16} -{"level_0":21,"index":106,"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.26,"AmazonCounterfactualClassification (de)":67.97,"AmazonCounterfactualClassification (ja)":45.72,"AmazonReviewsClassification (de)":43.16,"AmazonReviewsClassification (es)":42.89,"AmazonReviewsClassification (fr)":41.48,"AmazonReviewsClassification (ja)":22.49,"AmazonReviewsClassification (zh)":22.12,"MTOPDomainClassification (de)":80.56,"MTOPDomainClassification (es)":80.78,"MTOPDomainClassification (fr)":79.6,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":15.82,"MTOPIntentClassification (de)":52.5,"MTOPIntentClassification (es)":52.07,"MTOPIntentClassification (fr)":47.73,"MTOPIntentClassification (hi)":3.74,"MTOPIntentClassification (th)":4.96,"MassiveIntentClassification (af)":38.41,"MassiveIntentClassification (am)":2.49,"MassiveIntentClassification (ar)":4.7,"MassiveIntentClassification (az)":31.77,"MassiveIntentClassification (bn)":2.77,"MassiveIntentClassification (cy)":31.69,"MassiveIntentClassification (de)":52.01,"MassiveIntentClassification (el)":9.74,"MassiveIntentClassification (es)":54.1,"MassiveIntentClassification (fa)":3.86,"MassiveIntentClassification (fi)":34.07,"MassiveIntentClassification (fr)":57.01,"MassiveIntentClassification (he)":2.14,"MassiveIntentClassification (hi)":2.97,"MassiveIntentClassification (hu)":32.01,"MassiveIntentClassification (hy)":3.17,"MassiveIntentClassification (id)":34.55,"MassiveIntentClassification (is)":32.0,"MassiveIntentClassification (it)":52.94,"MassiveIntentClassification (ja)":2.9,"MassiveIntentClassification (jv)":32.42,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.5,"MassiveIntentClassification (kn)":2.41,"MassiveIntentClassification (ko)":2.57,"MassiveIntentClassification (lv)":35.09,"MassiveIntentClassification (ml)":2.95,"MassiveIntentClassification (mn)":18.33,"MassiveIntentClassification (ms)":29.69,"MassiveIntentClassification (my)":3.99,"MassiveIntentClassification (nl)":44.95,"MassiveIntentClassification (pt)":51.96,"MassiveIntentClassification (ro)":43.83,"MassiveIntentClassification (ru)":17.32,"MassiveIntentClassification (sl)":33.71,"MassiveIntentClassification (sq)":37.62,"MassiveIntentClassification (sw)":31.9,"MassiveIntentClassification (ta)":1.91,"MassiveIntentClassification (te)":2.54,"MassiveIntentClassification (th)":3.85,"MassiveIntentClassification (tl)":36.83,"MassiveIntentClassification (tr)":33.0,"MassiveIntentClassification (ur)":2.62,"MassiveIntentClassification (vi)":22.81,"MassiveIntentClassification (zh-TW)":3.49,"MassiveScenarioClassification 
(af)":50.28,"MassiveScenarioClassification (am)":7.15,"MassiveScenarioClassification (ar)":12.12,"MassiveScenarioClassification (az)":39.68,"MassiveScenarioClassification (bn)":8.06,"MassiveScenarioClassification (cy)":38.01,"MassiveScenarioClassification (de)":62.71,"MassiveScenarioClassification (el)":17.19,"MassiveScenarioClassification (es)":59.56,"MassiveScenarioClassification (fa)":6.5,"MassiveScenarioClassification (fi)":41.72,"MassiveScenarioClassification (fr)":63.6,"MassiveScenarioClassification (he)":7.93,"MassiveScenarioClassification (hi)":7.85,"MassiveScenarioClassification (hu)":41.37,"MassiveScenarioClassification (hy)":9.42,"MassiveScenarioClassification (id)":44.88,"MassiveScenarioClassification (is)":40.86,"MassiveScenarioClassification (it)":60.09,"MassiveScenarioClassification (ja)":6.56,"MassiveScenarioClassification (jv)":40.18,"MassiveScenarioClassification (ka)":7.37,"MassiveScenarioClassification (km)":9.56,"MassiveScenarioClassification (kn)":8.4,"MassiveScenarioClassification (ko)":5.96,"MassiveScenarioClassification (lv)":41.44,"MassiveScenarioClassification (ml)":7.47,"MassiveScenarioClassification (mn)":25.36,"MassiveScenarioClassification (ms)":39.69,"MassiveScenarioClassification (my)":9.68,"MassiveScenarioClassification (nl)":56.09,"MassiveScenarioClassification (pt)":57.99,"MassiveScenarioClassification (ro)":56.0,"MassiveScenarioClassification (ru)":27.47,"MassiveScenarioClassification (sl)":41.04,"MassiveScenarioClassification (sq)":49.38,"MassiveScenarioClassification (sw)":40.62,"MassiveScenarioClassification (ta)":7.59,"MassiveScenarioClassification (te)":7.07,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":49.89,"MassiveScenarioClassification (tr)":43.08,"MassiveScenarioClassification (ur)":9.31,"MassiveScenarioClassification (vi)":27.46,"MassiveScenarioClassification (zh-TW)":7.24} -{"level_0":22,"index":105,"Rank":23,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.73,"AmazonCounterfactualClassification (de)":69.98,"AmazonCounterfactualClassification (ja)":46.05,"AmazonReviewsClassification (de)":37.9,"AmazonReviewsClassification (es)":37.33,"AmazonReviewsClassification (fr)":37.35,"AmazonReviewsClassification (ja)":22.29,"AmazonReviewsClassification (zh)":21.53,"MTOPDomainClassification (de)":76.98,"MTOPDomainClassification (es)":73.61,"MTOPDomainClassification (fr)":75.03,"MTOPDomainClassification (hi)":21.4,"MTOPDomainClassification (th)":16.21,"MTOPIntentClassification (de)":44.43,"MTOPIntentClassification (es)":42.03,"MTOPIntentClassification (fr)":43.85,"MTOPIntentClassification (hi)":3.8,"MTOPIntentClassification (th)":5.21,"MassiveIntentClassification (af)":34.32,"MassiveIntentClassification (am)":2.38,"MassiveIntentClassification (ar)":4.53,"MassiveIntentClassification (az)":31.76,"MassiveIntentClassification (bn)":2.58,"MassiveIntentClassification (cy)":28.94,"MassiveIntentClassification (de)":45.23,"MassiveIntentClassification (el)":10.05,"MassiveIntentClassification (es)":45.32,"MassiveIntentClassification (fa)":3.58,"MassiveIntentClassification (fi)":33.52,"MassiveIntentClassification (fr)":51.13,"MassiveIntentClassification (he)":2.63,"MassiveIntentClassification (hi)":2.68,"MassiveIntentClassification (hu)":32.31,"MassiveIntentClassification (hy)":3.33,"MassiveIntentClassification (id)":35.5,"MassiveIntentClassification (is)":29.82,"MassiveIntentClassification (it)":45.59,"MassiveIntentClassification 
(ja)":3.67,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":2.77,"MassiveIntentClassification (km)":5.66,"MassiveIntentClassification (kn)":2.59,"MassiveIntentClassification (ko)":2.34,"MassiveIntentClassification (lv)":33.97,"MassiveIntentClassification (ml)":2.55,"MassiveIntentClassification (mn)":14.7,"MassiveIntentClassification (ms)":33.12,"MassiveIntentClassification (my)":4.42,"MassiveIntentClassification (nl)":37.96,"MassiveIntentClassification (pt)":43.35,"MassiveIntentClassification (ro)":42.69,"MassiveIntentClassification (ru)":14.82,"MassiveIntentClassification (sl)":34.54,"MassiveIntentClassification (sq)":38.54,"MassiveIntentClassification (sw)":32.14,"MassiveIntentClassification (ta)":1.41,"MassiveIntentClassification (te)":2.5,"MassiveIntentClassification (th)":3.71,"MassiveIntentClassification (tl)":36.04,"MassiveIntentClassification (tr)":33.77,"MassiveIntentClassification (ur)":2.99,"MassiveIntentClassification (vi)":22.62,"MassiveIntentClassification (zh-TW)":4.63,"MassiveScenarioClassification (af)":44.45,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.41,"MassiveScenarioClassification (bn)":8.45,"MassiveScenarioClassification (cy)":35.04,"MassiveScenarioClassification (de)":59.12,"MassiveScenarioClassification (el)":17.68,"MassiveScenarioClassification (es)":55.61,"MassiveScenarioClassification (fa)":6.86,"MassiveScenarioClassification (fi)":41.34,"MassiveScenarioClassification (fr)":59.92,"MassiveScenarioClassification (he)":7.86,"MassiveScenarioClassification (hi)":7.63,"MassiveScenarioClassification (hu)":41.31,"MassiveScenarioClassification (hy)":9.23,"MassiveScenarioClassification (id)":44.64,"MassiveScenarioClassification (is)":39.63,"MassiveScenarioClassification (it)":54.58,"MassiveScenarioClassification (ja)":4.96,"MassiveScenarioClassification (jv)":40.73,"MassiveScenarioClassification (ka)":7.51,"MassiveScenarioClassification (km)":8.73,"MassiveScenarioClassification (kn)":7.99,"MassiveScenarioClassification (ko)":6.03,"MassiveScenarioClassification (lv)":36.42,"MassiveScenarioClassification (ml)":6.96,"MassiveScenarioClassification (mn)":19.85,"MassiveScenarioClassification (ms)":43.18,"MassiveScenarioClassification (my)":9.46,"MassiveScenarioClassification (nl)":50.0,"MassiveScenarioClassification (pt)":52.24,"MassiveScenarioClassification (ro)":53.7,"MassiveScenarioClassification (ru)":20.69,"MassiveScenarioClassification (sl)":39.79,"MassiveScenarioClassification (sq)":50.16,"MassiveScenarioClassification (sw)":40.48,"MassiveScenarioClassification (ta)":7.47,"MassiveScenarioClassification (te)":6.87,"MassiveScenarioClassification (th)":8.26,"MassiveScenarioClassification (tl)":48.94,"MassiveScenarioClassification (tr)":41.83,"MassiveScenarioClassification (ur)":9.77,"MassiveScenarioClassification (vi)":30.01,"MassiveScenarioClassification (zh-TW)":7.91} -{"level_0":23,"index":98,"Rank":24,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":24.49,"AmazonCounterfactualClassification (de)":54.46,"AmazonCounterfactualClassification (ja)":43.87,"AmazonReviewsClassification (de)":24.08,"AmazonReviewsClassification (es)":23.88,"AmazonReviewsClassification (fr)":23.31,"AmazonReviewsClassification (ja)":20.25,"AmazonReviewsClassification (zh)":20.49,"MTOPDomainClassification (de)":48.55,"MTOPDomainClassification (es)":58.39,"MTOPDomainClassification (fr)":54.61,"MTOPDomainClassification 
(hi)":21.22,"MTOPDomainClassification (th)":14.98,"MTOPIntentClassification (de)":35.55,"MTOPIntentClassification (es)":36.72,"MTOPIntentClassification (fr)":34.71,"MTOPIntentClassification (hi)":4.44,"MTOPIntentClassification (th)":4.67,"MassiveIntentClassification (af)":33.68,"MassiveIntentClassification (am)":2.94,"MassiveIntentClassification (ar)":10.04,"MassiveIntentClassification (az)":30.74,"MassiveIntentClassification (bn)":3.02,"MassiveIntentClassification (cy)":33.94,"MassiveIntentClassification (de)":36.06,"MassiveIntentClassification (el)":27.7,"MassiveIntentClassification (es)":35.6,"MassiveIntentClassification (fa)":17.97,"MassiveIntentClassification (fi)":35.53,"MassiveIntentClassification (fr)":38.41,"MassiveIntentClassification (he)":2.69,"MassiveIntentClassification (hi)":3.43,"MassiveIntentClassification (hu)":34.05,"MassiveIntentClassification (hy)":3.11,"MassiveIntentClassification (id)":40.02,"MassiveIntentClassification (is)":32.63,"MassiveIntentClassification (it)":39.28,"MassiveIntentClassification (ja)":4.95,"MassiveIntentClassification (jv)":34.95,"MassiveIntentClassification (ka)":2.57,"MassiveIntentClassification (km)":4.73,"MassiveIntentClassification (kn)":3.54,"MassiveIntentClassification (ko)":2.68,"MassiveIntentClassification (lv)":37.91,"MassiveIntentClassification (ml)":2.88,"MassiveIntentClassification (mn)":16.94,"MassiveIntentClassification (ms)":36.6,"MassiveIntentClassification (my)":3.96,"MassiveIntentClassification (nl)":33.95,"MassiveIntentClassification (pt)":43.05,"MassiveIntentClassification (ro)":36.2,"MassiveIntentClassification (ru)":25.3,"MassiveIntentClassification (sl)":35.9,"MassiveIntentClassification (sq)":36.6,"MassiveIntentClassification (sw)":34.81,"MassiveIntentClassification (ta)":3.11,"MassiveIntentClassification (te)":2.53,"MassiveIntentClassification (th)":4.38,"MassiveIntentClassification (tl)":35.51,"MassiveIntentClassification (tr)":32.02,"MassiveIntentClassification (ur)":9.61,"MassiveIntentClassification (vi)":37.07,"MassiveIntentClassification (zh-TW)":4.79,"MassiveScenarioClassification (af)":36.17,"MassiveScenarioClassification (am)":7.64,"MassiveScenarioClassification (ar)":15.26,"MassiveScenarioClassification (az)":30.73,"MassiveScenarioClassification (bn)":7.15,"MassiveScenarioClassification (cy)":34.73,"MassiveScenarioClassification (de)":38.62,"MassiveScenarioClassification (el)":27.18,"MassiveScenarioClassification (es)":39.44,"MassiveScenarioClassification (fa)":21.43,"MassiveScenarioClassification (fi)":33.21,"MassiveScenarioClassification (fr)":40.26,"MassiveScenarioClassification (he)":7.42,"MassiveScenarioClassification (hi)":8.06,"MassiveScenarioClassification (hu)":34.54,"MassiveScenarioClassification (hy)":8.61,"MassiveScenarioClassification (id)":40.04,"MassiveScenarioClassification (is)":33.57,"MassiveScenarioClassification (it)":40.1,"MassiveScenarioClassification (ja)":9.96,"MassiveScenarioClassification (jv)":36.11,"MassiveScenarioClassification (ka)":7.13,"MassiveScenarioClassification (km)":9.66,"MassiveScenarioClassification (kn)":7.55,"MassiveScenarioClassification (ko)":7.27,"MassiveScenarioClassification (lv)":37.03,"MassiveScenarioClassification (ml)":7.22,"MassiveScenarioClassification (mn)":21.53,"MassiveScenarioClassification (ms)":37.57,"MassiveScenarioClassification (my)":9.54,"MassiveScenarioClassification (nl)":34.62,"MassiveScenarioClassification (pt)":44.68,"MassiveScenarioClassification (ro)":37.29,"MassiveScenarioClassification (ru)":28.16,"MassiveScenarioClassification 
(sl)":37.95,"MassiveScenarioClassification (sq)":37.82,"MassiveScenarioClassification (sw)":35.37,"MassiveScenarioClassification (ta)":7.19,"MassiveScenarioClassification (te)":7.29,"MassiveScenarioClassification (th)":9.47,"MassiveScenarioClassification (tl)":37.31,"MassiveScenarioClassification (tr)":34.57,"MassiveScenarioClassification (ur)":16.17,"MassiveScenarioClassification (vi)":35.91,"MassiveScenarioClassification (zh-TW)":10.19} -{"level_0":24,"index":0,"Rank":25,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.59,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.05,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":66.09,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.83,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.71,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification 
(hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":25,"index":1,"Rank":26,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.26,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":79.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":45.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":53.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification 
(tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":62.46,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":26,"index":2,"Rank":27,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.15,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.68,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":63.08,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification 
(jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.15,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":27,"index":3,"Rank":28,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.98,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.12,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification 
(af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} 
-{"level_0":28,"index":4,"Rank":29,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.36,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.52,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":68.06,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":29,"index":6,"Rank":30,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.18,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":30,"index":7,"Rank":31,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.07,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":75.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.03,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.3,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":31,"index":8,"Rank":32,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":52.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification 
(fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":32,"index":9,"Rank":33,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification 
(de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":33,"index":10,"Rank":34,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification 
(id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":34,"index":11,"Rank":35,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.81,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification 
(ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":35,"index":12,"Rank":36,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.8,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification 
(km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":36,"index":13,"Rank":37,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.15,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification 
(ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":37,"index":14,"Rank":38,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, 
fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":38,"index":15,"Rank":39,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.38,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":39,"index":16,"Rank":40,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.19,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.0,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":97.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":93.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":79.6,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification 
(sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":82.18,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":40,"index":17,"Rank":41,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":35.91,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification 
(hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":41,"index":18,"Rank":42,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.5,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification 
(th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification 
(tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":42,"index":19,"Rank":43,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":54.11,"AmazonCounterfactualClassification (ja)":53.95,"AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.68,"MTOPDomainClassification (de)":57.22,"MTOPDomainClassification (es)":58.4,"MTOPDomainClassification (fr)":54.97,"MTOPDomainClassification (hi)":64.17,"MTOPDomainClassification (th)":70.47,"MTOPIntentClassification (de)":32.18,"MTOPIntentClassification (es)":33.9,"MTOPIntentClassification (fr)":26.69,"MTOPIntentClassification (hi)":38.27,"MTOPIntentClassification (th)":42.73,"MassiveIntentClassification (af)":33.5,"MassiveIntentClassification (am)":19.56,"MassiveIntentClassification (ar)":26.46,"MassiveIntentClassification (az)":31.58,"MassiveIntentClassification (bn)":27.99,"MassiveIntentClassification (cy)":28.26,"MassiveIntentClassification (de)":34.2,"MassiveIntentClassification (el)":26.02,"MassiveIntentClassification (es)":36.37,"MassiveIntentClassification (fa)":48.91,"MassiveIntentClassification (fi)":30.11,"MassiveIntentClassification (fr)":37.53,"MassiveIntentClassification (he)":24.86,"MassiveIntentClassification (hi)":39.14,"MassiveIntentClassification (hu)":31.97,"MassiveIntentClassification (hy)":31.36,"MassiveIntentClassification (id)":37.04,"MassiveIntentClassification (is)":28.61,"MassiveIntentClassification (it)":37.86,"MassiveIntentClassification (ja)":47.9,"MassiveIntentClassification (jv)":29.08,"MassiveIntentClassification (ka)":25.77,"MassiveIntentClassification (km)":23.66,"MassiveIntentClassification (kn)":21.27,"MassiveIntentClassification (ko)":40.42,"MassiveIntentClassification (lv)":30.13,"MassiveIntentClassification (ml)":25.89,"MassiveIntentClassification (mn)":27.71,"MassiveIntentClassification (ms)":33.04,"MassiveIntentClassification (my)":24.19,"MassiveIntentClassification (nl)":39.31,"MassiveIntentClassification (pt)":40.26,"MassiveIntentClassification (ro)":35.42,"MassiveIntentClassification (ru)":39.69,"MassiveIntentClassification (sl)":31.09,"MassiveIntentClassification (sq)":35.15,"MassiveIntentClassification (sw)":27.91,"MassiveIntentClassification (ta)":28.12,"MassiveIntentClassification (te)":26.34,"MassiveIntentClassification (th)":48.24,"MassiveIntentClassification (tl)":32.73,"MassiveIntentClassification (tr)":30.21,"MassiveIntentClassification (ur)":30.28,"MassiveIntentClassification (vi)":40.45,"MassiveIntentClassification (zh-TW)":64.03,"MassiveScenarioClassification (af)":43.53,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":34.91,"MassiveScenarioClassification (az)":36.37,"MassiveScenarioClassification (bn)":39.2,"MassiveScenarioClassification (cy)":32.18,"MassiveScenarioClassification (de)":43.92,"MassiveScenarioClassification (el)":35.03,"MassiveScenarioClassification (es)":41.96,"MassiveScenarioClassification (fa)":58.36,"MassiveScenarioClassification (fi)":33.95,"MassiveScenarioClassification (fr)":45.32,"MassiveScenarioClassification (he)":34.06,"MassiveScenarioClassification (hi)":48.77,"MassiveScenarioClassification (hu)":39.92,"MassiveScenarioClassification (hy)":38.09,"MassiveScenarioClassification 
(id)":45.08,"MassiveScenarioClassification (is)":36.55,"MassiveScenarioClassification (it)":44.38,"MassiveScenarioClassification (ja)":57.02,"MassiveScenarioClassification (jv)":35.51,"MassiveScenarioClassification (ka)":33.41,"MassiveScenarioClassification (km)":30.9,"MassiveScenarioClassification (kn)":26.83,"MassiveScenarioClassification (ko)":49.52,"MassiveScenarioClassification (lv)":34.02,"MassiveScenarioClassification (ml)":34.55,"MassiveScenarioClassification (mn)":34.14,"MassiveScenarioClassification (ms)":42.71,"MassiveScenarioClassification (my)":31.0,"MassiveScenarioClassification (nl)":51.44,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":45.01,"MassiveScenarioClassification (ru)":48.66,"MassiveScenarioClassification (sl)":38.34,"MassiveScenarioClassification (sq)":44.78,"MassiveScenarioClassification (sw)":36.02,"MassiveScenarioClassification (ta)":37.81,"MassiveScenarioClassification (te)":34.6,"MassiveScenarioClassification (th)":57.38,"MassiveScenarioClassification (tl)":39.36,"MassiveScenarioClassification (tr)":36.16,"MassiveScenarioClassification (ur)":36.43,"MassiveScenarioClassification (vi)":47.04,"MassiveScenarioClassification (zh-TW)":71.96} -{"level_0":43,"index":20,"Rank":44,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":42.04,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":25.8,"MassiveIntentClassification (am)":3.34,"MassiveIntentClassification (ar)":6.49,"MassiveIntentClassification (az)":24.77,"MassiveIntentClassification (bn)":4.3,"MassiveIntentClassification (cy)":26.39,"MassiveIntentClassification (de)":28.09,"MassiveIntentClassification (el)":19.24,"MassiveIntentClassification (es)":30.62,"MassiveIntentClassification (fa)":7.21,"MassiveIntentClassification (fi)":27.21,"MassiveIntentClassification (fr)":32.64,"MassiveIntentClassification (he)":2.66,"MassiveIntentClassification (hi)":4.59,"MassiveIntentClassification (hu)":25.65,"MassiveIntentClassification (hy)":4.86,"MassiveIntentClassification (id)":29.81,"MassiveIntentClassification (is)":23.53,"MassiveIntentClassification (it)":34.47,"MassiveIntentClassification (ja)":39.4,"MassiveIntentClassification (jv)":28.75,"MassiveIntentClassification (ka)":4.34,"MassiveIntentClassification (km)":6.1,"MassiveIntentClassification (kn)":4.46,"MassiveIntentClassification (ko)":14.16,"MassiveIntentClassification (lv)":29.86,"MassiveIntentClassification (ml)":3.69,"MassiveIntentClassification (mn)":7.86,"MassiveIntentClassification (ms)":28.05,"MassiveIntentClassification (my)":6.98,"MassiveIntentClassification (nl)":32.92,"MassiveIntentClassification (pt)":33.53,"MassiveIntentClassification (ro)":31.32,"MassiveIntentClassification (ru)":11.27,"MassiveIntentClassification (sl)":27.94,"MassiveIntentClassification (sq)":32.9,"MassiveIntentClassification (sw)":29.4,"MassiveIntentClassification 
(ta)":3.33,"MassiveIntentClassification (te)":3.46,"MassiveIntentClassification (th)":12.98,"MassiveIntentClassification (tl)":30.73,"MassiveIntentClassification (tr)":23.57,"MassiveIntentClassification (ur)":4.98,"MassiveIntentClassification (vi)":21.89,"MassiveIntentClassification (zh-TW)":65.53,"MassiveScenarioClassification (af)":31.55,"MassiveScenarioClassification (am)":7.49,"MassiveScenarioClassification (ar)":15.0,"MassiveScenarioClassification (az)":29.13,"MassiveScenarioClassification (bn)":9.24,"MassiveScenarioClassification (cy)":29.72,"MassiveScenarioClassification (de)":34.68,"MassiveScenarioClassification (el)":28.83,"MassiveScenarioClassification (es)":35.97,"MassiveScenarioClassification (fa)":11.12,"MassiveScenarioClassification (fi)":28.61,"MassiveScenarioClassification (fr)":40.66,"MassiveScenarioClassification (he)":9.01,"MassiveScenarioClassification (hi)":9.92,"MassiveScenarioClassification (hu)":32.07,"MassiveScenarioClassification (hy)":8.44,"MassiveScenarioClassification (id)":34.9,"MassiveScenarioClassification (is)":30.95,"MassiveScenarioClassification (it)":41.06,"MassiveScenarioClassification (ja)":48.73,"MassiveScenarioClassification (jv)":35.09,"MassiveScenarioClassification (ka)":9.29,"MassiveScenarioClassification (km)":11.19,"MassiveScenarioClassification (kn)":10.1,"MassiveScenarioClassification (ko)":19.2,"MassiveScenarioClassification (lv)":32.49,"MassiveScenarioClassification (ml)":6.37,"MassiveScenarioClassification (mn)":13.08,"MassiveScenarioClassification (ms)":39.18,"MassiveScenarioClassification (my)":12.25,"MassiveScenarioClassification (nl)":38.17,"MassiveScenarioClassification (pt)":40.01,"MassiveScenarioClassification (ro)":39.25,"MassiveScenarioClassification (ru)":16.71,"MassiveScenarioClassification (sl)":33.94,"MassiveScenarioClassification (sq)":40.4,"MassiveScenarioClassification (sw)":37.14,"MassiveScenarioClassification (ta)":8.21,"MassiveScenarioClassification (te)":7.97,"MassiveScenarioClassification (th)":21.56,"MassiveScenarioClassification (tl)":36.7,"MassiveScenarioClassification (tr)":28.8,"MassiveScenarioClassification (ur)":10.46,"MassiveScenarioClassification (vi)":27.72,"MassiveScenarioClassification (zh-TW)":71.52} -{"level_0":44,"index":21,"Rank":45,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":28.81,"MassiveIntentClassification (am)":3.04,"MassiveIntentClassification (ar)":6.75,"MassiveIntentClassification (az)":27.0,"MassiveIntentClassification (bn)":3.24,"MassiveIntentClassification (cy)":31.18,"MassiveIntentClassification (de)":30.65,"MassiveIntentClassification (el)":18.17,"MassiveIntentClassification (es)":32.53,"MassiveIntentClassification (fa)":8.72,"MassiveIntentClassification (fi)":31.79,"MassiveIntentClassification (fr)":33.16,"MassiveIntentClassification 
(he)":3.03,"MassiveIntentClassification (hi)":3.61,"MassiveIntentClassification (hu)":30.47,"MassiveIntentClassification (hy)":5.35,"MassiveIntentClassification (id)":32.45,"MassiveIntentClassification (is)":30.12,"MassiveIntentClassification (it)":36.32,"MassiveIntentClassification (ja)":41.09,"MassiveIntentClassification (jv)":30.42,"MassiveIntentClassification (ka)":3.79,"MassiveIntentClassification (km)":6.79,"MassiveIntentClassification (kn)":3.86,"MassiveIntentClassification (ko)":8.82,"MassiveIntentClassification (lv)":30.23,"MassiveIntentClassification (ml)":2.93,"MassiveIntentClassification (mn)":12.61,"MassiveIntentClassification (ms)":30.66,"MassiveIntentClassification (my)":5.85,"MassiveIntentClassification (nl)":34.1,"MassiveIntentClassification (pt)":36.92,"MassiveIntentClassification (ro)":33.01,"MassiveIntentClassification (ru)":10.4,"MassiveIntentClassification (sl)":30.73,"MassiveIntentClassification (sq)":36.98,"MassiveIntentClassification (sw)":31.62,"MassiveIntentClassification (ta)":3.19,"MassiveIntentClassification (te)":2.59,"MassiveIntentClassification (th)":4.61,"MassiveIntentClassification (tl)":32.55,"MassiveIntentClassification (tr)":26.87,"MassiveIntentClassification (ur)":4.23,"MassiveIntentClassification (vi)":29.24,"MassiveIntentClassification (zh-TW)":65.49,"MassiveScenarioClassification (af)":35.41,"MassiveScenarioClassification (am)":9.05,"MassiveScenarioClassification (ar)":14.92,"MassiveScenarioClassification (az)":31.97,"MassiveScenarioClassification (bn)":9.15,"MassiveScenarioClassification (cy)":37.45,"MassiveScenarioClassification (de)":38.33,"MassiveScenarioClassification (el)":24.45,"MassiveScenarioClassification (es)":37.73,"MassiveScenarioClassification (fa)":11.84,"MassiveScenarioClassification (fi)":34.49,"MassiveScenarioClassification (fr)":40.92,"MassiveScenarioClassification (he)":7.64,"MassiveScenarioClassification (hi)":8.64,"MassiveScenarioClassification (hu)":37.25,"MassiveScenarioClassification (hy)":10.91,"MassiveScenarioClassification (id)":36.11,"MassiveScenarioClassification (is)":37.8,"MassiveScenarioClassification (it)":41.68,"MassiveScenarioClassification (ja)":48.38,"MassiveScenarioClassification (jv)":35.2,"MassiveScenarioClassification (ka)":9.9,"MassiveScenarioClassification (km)":12.75,"MassiveScenarioClassification (kn)":10.31,"MassiveScenarioClassification (ko)":14.52,"MassiveScenarioClassification (lv)":33.08,"MassiveScenarioClassification (ml)":7.44,"MassiveScenarioClassification (mn)":17.98,"MassiveScenarioClassification (ms)":37.93,"MassiveScenarioClassification (my)":11.73,"MassiveScenarioClassification (nl)":40.37,"MassiveScenarioClassification (pt)":41.83,"MassiveScenarioClassification (ro)":40.63,"MassiveScenarioClassification (ru)":18.96,"MassiveScenarioClassification (sl)":35.3,"MassiveScenarioClassification (sq)":41.96,"MassiveScenarioClassification (sw)":38.88,"MassiveScenarioClassification (ta)":8.51,"MassiveScenarioClassification (te)":7.35,"MassiveScenarioClassification (th)":10.1,"MassiveScenarioClassification (tl)":35.91,"MassiveScenarioClassification (tr)":32.08,"MassiveScenarioClassification (ur)":10.37,"MassiveScenarioClassification (vi)":33.91,"MassiveScenarioClassification (zh-TW)":71.0} -{"level_0":45,"index":22,"Rank":46,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification 
(de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":38.6,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":80.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":50.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.31,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification 
(ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":46,"index":23,"Rank":47,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.23,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":61.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.29,"MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":47,"index":24,"Rank":48,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.93,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":48,"index":25,"Rank":49,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.88,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification 
(it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":49,"index":26,"Rank":50,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.51,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification 
(hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification 
(zh-TW)":""} -{"level_0":50,"index":27,"Rank":51,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification 
(ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":51,"index":28,"Rank":52,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification 
(cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":52,"index":29,"Rank":53,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":53,"index":30,"Rank":54,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":33.77,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification 
(fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":54,"index":31,"Rank":55,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.38,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification 
(de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.65,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.87,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":55,"index":32,"Rank":56,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification 
(id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":56,"index":33,"Rank":57,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.63,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.86,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification 
(ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":57,"index":35,"Rank":58,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":40.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.13,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.99,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification 
(ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":58,"index":36,"Rank":59,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.42,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification 
(am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.81,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.99,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":59,"index":37,"Rank":60,"Model":"bilingual-embedding-large-8k<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":44.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.82,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.63,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.14,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.74,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification 
(mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":60,"index":38,"Rank":61,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":84.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.35,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.57,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification 
(es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.04,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":61,"index":41,"Rank":62,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":66.64,"AmazonCounterfactualClassification (ja)":58.06,"AmazonReviewsClassification (de)":35.29,"AmazonReviewsClassification (es)":38.34,"AmazonReviewsClassification (fr)":37.84,"AmazonReviewsClassification (ja)":30.94,"AmazonReviewsClassification (zh)":33.75,"MTOPDomainClassification (de)":84.54,"MTOPDomainClassification (es)":86.46,"MTOPDomainClassification (fr)":81.32,"MTOPDomainClassification (hi)":58.23,"MTOPDomainClassification (th)":72.29,"MTOPIntentClassification (de)":60.52,"MTOPIntentClassification (es)":64.32,"MTOPIntentClassification (fr)":58.67,"MTOPIntentClassification (hi)":41.96,"MTOPIntentClassification (th)":55.28,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification 
(ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":62,"index":42,"Rank":63,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.08,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.26,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":68.55,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":67.4,"MassiveIntentClassification 
(he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":63,"index":43,"Rank":64,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification 
(es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification 
(te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":64,"index":44,"Rank":65,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.59,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification 
(it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":65,"index":45,"Rank":66,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.79,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.12,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":59.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification 
(vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":65.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":66,"index":47,"Rank":67,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.54,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification 
(kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":67,"index":48,"Rank":68,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":39.29,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.63,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":83.8,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.36,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification 
(az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":68,"index":50,"Rank":69,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":69,"index":51,"Rank":70,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.03,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":77.1,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":43.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.59,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":61.28,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":70,"index":52,"Rank":71,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.97,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification 
(sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":71,"index":53,"Rank":72,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification 
(hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":72,"index":54,"Rank":73,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.59,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification 
(hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification 
(th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":73,"index":56,"Rank":74,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":24.9,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":25.55,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.49,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.98,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":11.41,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification 
(jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":74,"index":57,"Rank":75,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":23.52,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":27.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":8.61,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.24,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification 
(am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":10.98,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":75,"index":58,"Rank":76,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":22.45,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":24.27,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.79,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":16.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification 
(ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":22.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":76,"index":59,"Rank":77,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.61,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.84,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification 
(cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":77,"index":60,"Rank":78,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification 
(ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.02,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":64.49,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":39.4,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":38.01,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":43.63,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification 
(pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":78,"index":61,"Rank":79,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification 
(he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":79,"index":62,"Rank":80,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":80,"index":63,"Rank":81,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.64,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification 
(it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":81,"index":64,"Rank":82,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.34,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification 
(hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification 
(zh-TW)":""} -{"level_0":82,"index":65,"Rank":83,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.57,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":83,"index":66,"Rank":84,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":84,"index":74,"Rank":85,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":68.92,"AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":37.72,"AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":88.37,"MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":63.83,"MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":63.89,"MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":71.25,"MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":85,"index":75,"Rank":86,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":38.68,"AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":89.89,"MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":68.76,"MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":66.93,"MassiveIntentClassification (fa)":"","MassiveIntentClassification 
(fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":71.23,"MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":86,"index":76,"Rank":87,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification 
(zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":87,"index":77,"Rank":88,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification 
(is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":88,"index":78,"Rank":89,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":50.07,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification 
(vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":89,"index":79,"Rank":90,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification 
(kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":90,"index":80,"Rank":91,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.33,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.39,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.88,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification 
(az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.58,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":91,"index":81,"Rank":92,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.69,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.86,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":92,"index":82,"Rank":93,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.96,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification 
(es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.7,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":93,"index":83,"Rank":94,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":65.93,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.48,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification 
(ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":94,"index":84,"Rank":95,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.59,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.67,"MassiveIntentClassification 
(he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.61,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":95,"index":85,"Rank":96,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.02,"MTOPDomainClassification (de)":"","MTOPDomainClassification 
(es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification 
(te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":96,"index":86,"Rank":97,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification 
(ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":97,"index":87,"Rank":98,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.79,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification 
(af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":98,"index":88,"Rank":99,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification 
(ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":99,"index":89,"Rank":100,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification 
(az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":100,"index":90,"Rank":101,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":55.06,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":63.04,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":101,"index":91,"Rank":102,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":22.3,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":27.41,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":102,"index":92,"Rank":103,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":38.88,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification 
(sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":43.3,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":103,"index":93,"Rank":104,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.24,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification 
(hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":104,"index":94,"Rank":105,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.33,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification 
(th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification 
(tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":105,"index":95,"Rank":106,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification 
(ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":106,"index":102,"Rank":107,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":27.05,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":72.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.18,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":42.64,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification 
(am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":107,"index":103,"Rank":108,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":68.35,"AmazonCounterfactualClassification (ja)":63.45,"AmazonReviewsClassification (de)":35.91,"AmazonReviewsClassification (es)":37.49,"AmazonReviewsClassification (fr)":35.3,"AmazonReviewsClassification (ja)":33.24,"AmazonReviewsClassification (zh)":35.26,"MTOPDomainClassification (de)":79.2,"MTOPDomainClassification (es)":83.04,"MTOPDomainClassification (fr)":78.63,"MTOPDomainClassification (hi)":81.36,"MTOPDomainClassification (th)":79.99,"MTOPIntentClassification (de)":54.23,"MTOPIntentClassification (es)":60.28,"MTOPIntentClassification (fr)":54.05,"MTOPIntentClassification (hi)":59.9,"MTOPIntentClassification (th)":61.96,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":57.52,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification 
(ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":64.52,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":108,"index":104,"Rank":109,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":69.95,"AmazonCounterfactualClassification (ja)":69.79,"AmazonReviewsClassification (de)":39.52,"AmazonReviewsClassification (es)":39.99,"AmazonReviewsClassification (fr)":39.0,"AmazonReviewsClassification (ja)":36.64,"AmazonReviewsClassification (zh)":37.74,"MTOPDomainClassification (de)":85.73,"MTOPDomainClassification (es)":86.96,"MTOPDomainClassification (fr)":81.21,"MTOPDomainClassification (hi)":84.76,"MTOPDomainClassification (th)":82.51,"MTOPIntentClassification (de)":61.27,"MTOPIntentClassification (es)":66.59,"MTOPIntentClassification (fr)":59.76,"MTOPIntentClassification (hi)":62.37,"MTOPIntentClassification (th)":64.8,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification 
(ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.88,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.9,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":109,"index":108,"Rank":110,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, 
fp32)":18.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":46.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.33,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.91,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":68.53,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":110,"index":109,"Rank":111,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":29.75,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":111,"index":110,"Rank":112,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":29.85,"MassiveIntentClassification (am)":2.13,"MassiveIntentClassification (ar)":3.41,"MassiveIntentClassification (az)":22.73,"MassiveIntentClassification (bn)":2.87,"MassiveIntentClassification (cy)":29.43,"MassiveIntentClassification (de)":30.85,"MassiveIntentClassification (el)":8.74,"MassiveIntentClassification (es)":30.63,"MassiveIntentClassification (fa)":3.16,"MassiveIntentClassification (fi)":30.4,"MassiveIntentClassification (fr)":30.84,"MassiveIntentClassification (he)":2.06,"MassiveIntentClassification (hi)":2.43,"MassiveIntentClassification (hu)":24.87,"MassiveIntentClassification (hy)":2.67,"MassiveIntentClassification (id)":32.7,"MassiveIntentClassification (is)":24.13,"MassiveIntentClassification (it)":34.58,"MassiveIntentClassification (ja)":5.99,"MassiveIntentClassification (jv)":27.6,"MassiveIntentClassification (ka)":2.14,"MassiveIntentClassification (km)":4.38,"MassiveIntentClassification (kn)":2.1,"MassiveIntentClassification (ko)":2.36,"MassiveIntentClassification (lv)":22.06,"MassiveIntentClassification (ml)":2.29,"MassiveIntentClassification (mn)":28.51,"MassiveIntentClassification (ms)":28.16,"MassiveIntentClassification (my)":3.97,"MassiveIntentClassification (nl)":30.51,"MassiveIntentClassification (pt)":33.85,"MassiveIntentClassification 
(ro)":30.47,"MassiveIntentClassification (ru)":58.06,"MassiveIntentClassification (sl)":29.64,"MassiveIntentClassification (sq)":31.7,"MassiveIntentClassification (sw)":27.52,"MassiveIntentClassification (ta)":1.38,"MassiveIntentClassification (te)":2.04,"MassiveIntentClassification (th)":3.79,"MassiveIntentClassification (tl)":31.44,"MassiveIntentClassification (tr)":26.22,"MassiveIntentClassification (ur)":2.55,"MassiveIntentClassification (vi)":23.1,"MassiveIntentClassification (zh-TW)":6.3,"MassiveScenarioClassification (af)":39.37,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":11.36,"MassiveScenarioClassification (az)":29.62,"MassiveScenarioClassification (bn)":8.79,"MassiveScenarioClassification (cy)":38.93,"MassiveScenarioClassification (de)":40.66,"MassiveScenarioClassification (el)":16.44,"MassiveScenarioClassification (es)":36.28,"MassiveScenarioClassification (fa)":6.8,"MassiveScenarioClassification (fi)":34.5,"MassiveScenarioClassification (fr)":42.42,"MassiveScenarioClassification (he)":7.95,"MassiveScenarioClassification (hi)":7.51,"MassiveScenarioClassification (hu)":35.04,"MassiveScenarioClassification (hy)":8.53,"MassiveScenarioClassification (id)":39.6,"MassiveScenarioClassification (is)":32.61,"MassiveScenarioClassification (it)":41.2,"MassiveScenarioClassification (ja)":11.21,"MassiveScenarioClassification (jv)":36.25,"MassiveScenarioClassification (ka)":6.59,"MassiveScenarioClassification (km)":8.15,"MassiveScenarioClassification (kn)":8.05,"MassiveScenarioClassification (ko)":5.62,"MassiveScenarioClassification (lv)":28.47,"MassiveScenarioClassification (ml)":7.35,"MassiveScenarioClassification (mn)":33.48,"MassiveScenarioClassification (ms)":38.85,"MassiveScenarioClassification (my)":11.23,"MassiveScenarioClassification (nl)":38.92,"MassiveScenarioClassification (pt)":40.23,"MassiveScenarioClassification (ro)":39.78,"MassiveScenarioClassification (ru)":64.15,"MassiveScenarioClassification (sl)":35.34,"MassiveScenarioClassification (sq)":42.07,"MassiveScenarioClassification (sw)":35.33,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.86,"MassiveScenarioClassification (th)":8.25,"MassiveScenarioClassification (tl)":38.17,"MassiveScenarioClassification (tr)":33.85,"MassiveScenarioClassification (ur)":8.74,"MassiveScenarioClassification (vi)":31.94,"MassiveScenarioClassification (zh-TW)":11.68} -{"level_0":112,"index":111,"Rank":113,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.68,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification 
(es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":113,"index":112,"Rank":114,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification 
(fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.12,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":114,"index":114,"Rank":115,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.46,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification 
(hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":115,"index":115,"Rank":116,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification 
(tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":116,"index":116,"Rank":117,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.25,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":71.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":44.53,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.93,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification 
(jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":58.31,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":117,"index":117,"Rank":118,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":45.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification 
(af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} 
-{"level_0":118,"index":118,"Rank":119,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.23,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":119,"index":119,"Rank":120,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.69,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":120,"index":120,"Rank":121,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":21.96,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification 
(ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":121,"index":121,"Rank":122,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":33.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.5,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.98,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification 
(fr)":61.19,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.22,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":122,"index":122,"Rank":123,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification 
(zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.64,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.8,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":73.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":123,"index":123,"Rank":124,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.75,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":43.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":19.38,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":13.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.21,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification 
(is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":124,"index":124,"Rank":125,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":36.77,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.37,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.82,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification 
(vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":125,"index":125,"Rank":126,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.15,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification 
(km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.94,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":126,"index":126,"Rank":127,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.76,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.38,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":64.45,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification 
(am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.42,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.11,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} -{"level_0":127,"index":127,"Rank":128,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":1,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.56,"AmazonCounterfactualClassification (de)":73.17,"AmazonCounterfactualClassification (ja)":76.42,"AmazonReviewsClassification (de)":39.92,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":38.52,"AmazonReviewsClassification (ja)":36.44,"AmazonReviewsClassification (zh)":36.45,"MTOPDomainClassification (de)":86.95,"MTOPDomainClassification (es)":84.07,"MTOPDomainClassification (fr)":84.14,"MTOPDomainClassification (hi)":85.11,"MTOPDomainClassification (th)":81.24,"MTOPIntentClassification (de)":63.42,"MTOPIntentClassification (es)":64.44,"MTOPIntentClassification (fr)":62.01,"MTOPIntentClassification (hi)":62.58,"MTOPIntentClassification (th)":64.61,"MassiveIntentClassification (af)":56.12,"MassiveIntentClassification (am)":55.71,"MassiveIntentClassification (ar)":50.86,"MassiveIntentClassification (az)":58.97,"MassiveIntentClassification (bn)":58.22,"MassiveIntentClassification (cy)":50.16,"MassiveIntentClassification (de)":56.21,"MassiveIntentClassification (el)":57.03,"MassiveIntentClassification (es)":58.32,"MassiveIntentClassification (fa)":62.33,"MassiveIntentClassification (fi)":60.12,"MassiveIntentClassification (fr)":60.47,"MassiveIntentClassification (he)":56.55,"MassiveIntentClassification (hi)":59.4,"MassiveIntentClassification (hu)":59.52,"MassiveIntentClassification (hy)":56.2,"MassiveIntentClassification (id)":61.12,"MassiveIntentClassification (is)":54.9,"MassiveIntentClassification (it)":59.83,"MassiveIntentClassification (ja)":63.11,"MassiveIntentClassification (jv)":50.98,"MassiveIntentClassification (ka)":48.35,"MassiveIntentClassification (km)":48.55,"MassiveIntentClassification (kn)":56.24,"MassiveIntentClassification (ko)":60.99,"MassiveIntentClassification (lv)":57.1,"MassiveIntentClassification (ml)":57.91,"MassiveIntentClassification (mn)":58.5,"MassiveIntentClassification (ms)":58.6,"MassiveIntentClassification (my)":57.35,"MassiveIntentClassification (nl)":59.37,"MassiveIntentClassification (pt)":60.16,"MassiveIntentClassification (ro)":57.92,"MassiveIntentClassification (ru)":60.67,"MassiveIntentClassification (sl)":59.37,"MassiveIntentClassification (sq)":58.03,"MassiveIntentClassification (sw)":51.62,"MassiveIntentClassification (ta)":55.04,"MassiveIntentClassification (te)":58.32,"MassiveIntentClassification (th)":56.58,"MassiveIntentClassification (tl)":55.28,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":56.7,"MassiveIntentClassification (vi)":56.67,"MassiveIntentClassification (zh-TW)":59.51,"MassiveScenarioClassification (af)":63.39,"MassiveScenarioClassification (am)":62.02,"MassiveScenarioClassification (ar)":57.72,"MassiveScenarioClassification (az)":63.48,"MassiveScenarioClassification (bn)":61.84,"MassiveScenarioClassification 
(cy)":56.13,"MassiveScenarioClassification (de)":62.39,"MassiveScenarioClassification (el)":64.58,"MassiveScenarioClassification (es)":63.61,"MassiveScenarioClassification (fa)":67.46,"MassiveScenarioClassification (fi)":64.58,"MassiveScenarioClassification (fr)":65.1,"MassiveScenarioClassification (he)":63.53,"MassiveScenarioClassification (hi)":64.4,"MassiveScenarioClassification (hu)":65.82,"MassiveScenarioClassification (hy)":61.25,"MassiveScenarioClassification (id)":65.84,"MassiveScenarioClassification (is)":61.94,"MassiveScenarioClassification (it)":64.09,"MassiveScenarioClassification (ja)":67.72,"MassiveScenarioClassification (jv)":58.29,"MassiveScenarioClassification (ka)":53.38,"MassiveScenarioClassification (km)":56.18,"MassiveScenarioClassification (kn)":61.74,"MassiveScenarioClassification (ko)":67.26,"MassiveScenarioClassification (lv)":61.87,"MassiveScenarioClassification (ml)":62.26,"MassiveScenarioClassification (mn)":62.6,"MassiveScenarioClassification (ms)":65.63,"MassiveScenarioClassification (my)":62.94,"MassiveScenarioClassification (nl)":65.16,"MassiveScenarioClassification (pt)":63.28,"MassiveScenarioClassification (ro)":62.41,"MassiveScenarioClassification (ru)":65.25,"MassiveScenarioClassification (sl)":64.25,"MassiveScenarioClassification (sq)":64.54,"MassiveScenarioClassification (sw)":58.36,"MassiveScenarioClassification (ta)":59.08,"MassiveScenarioClassification (te)":64.13,"MassiveScenarioClassification (th)":64.34,"MassiveScenarioClassification (tl)":60.23,"MassiveScenarioClassification (tr)":65.43,"MassiveScenarioClassification (ur)":61.52,"MassiveScenarioClassification (vi)":61.05,"MassiveScenarioClassification (zh-TW)":67.08} +{"Rank":2,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":44.78,"AmazonCounterfactualClassification (de)":68.14,"AmazonCounterfactualClassification (ja)":65.39,"AmazonReviewsClassification (de)":35.03,"AmazonReviewsClassification (es)":36.24,"AmazonReviewsClassification (fr)":35.7,"AmazonReviewsClassification (ja)":31.08,"AmazonReviewsClassification (zh)":33.89,"MTOPDomainClassification (de)":86.19,"MTOPDomainClassification (es)":87.75,"MTOPDomainClassification (fr)":84.61,"MTOPDomainClassification (hi)":76.41,"MTOPDomainClassification (th)":73.62,"MTOPIntentClassification (de)":59.21,"MTOPIntentClassification (es)":57.21,"MTOPIntentClassification (fr)":53.41,"MTOPIntentClassification (hi)":45.54,"MTOPIntentClassification (th)":47.73,"MassiveIntentClassification (af)":40.02,"MassiveIntentClassification (am)":2.35,"MassiveIntentClassification (ar)":43.14,"MassiveIntentClassification (az)":25.6,"MassiveIntentClassification (bn)":4.84,"MassiveIntentClassification (cy)":15.43,"MassiveIntentClassification (de)":51.57,"MassiveIntentClassification (el)":49.65,"MassiveIntentClassification (es)":56.57,"MassiveIntentClassification (fa)":55.36,"MassiveIntentClassification (fi)":45.72,"MassiveIntentClassification (fr)":57.02,"MassiveIntentClassification (he)":46.74,"MassiveIntentClassification (hi)":48.55,"MassiveIntentClassification (hu)":50.65,"MassiveIntentClassification (hy)":40.79,"MassiveIntentClassification (id)":56.0,"MassiveIntentClassification (is)":16.08,"MassiveIntentClassification (it)":57.65,"MassiveIntentClassification (ja)":55.33,"MassiveIntentClassification (jv)":28.16,"MassiveIntentClassification (ka)":29.41,"MassiveIntentClassification (km)":4.79,"MassiveIntentClassification (kn)":3.37,"MassiveIntentClassification 
(ko)":49.97,"MassiveIntentClassification (lv)":44.31,"MassiveIntentClassification (ml)":3.24,"MassiveIntentClassification (mn)":40.37,"MassiveIntentClassification (ms)":47.97,"MassiveIntentClassification (my)":38.48,"MassiveIntentClassification (nl)":58.29,"MassiveIntentClassification (pt)":58.63,"MassiveIntentClassification (ro)":50.63,"MassiveIntentClassification (ru)":57.96,"MassiveIntentClassification (sl)":50.66,"MassiveIntentClassification (sq)":50.25,"MassiveIntentClassification (sw)":19.29,"MassiveIntentClassification (ta)":3.79,"MassiveIntentClassification (te)":3.36,"MassiveIntentClassification (th)":45.28,"MassiveIntentClassification (tl)":28.44,"MassiveIntentClassification (tr)":50.47,"MassiveIntentClassification (ur)":46.03,"MassiveIntentClassification (vi)":45.25,"MassiveIntentClassification (zh-TW)":54.96,"MassiveScenarioClassification (af)":53.67,"MassiveScenarioClassification (am)":7.72,"MassiveScenarioClassification (ar)":52.19,"MassiveScenarioClassification (az)":34.75,"MassiveScenarioClassification (bn)":10.65,"MassiveScenarioClassification (cy)":21.24,"MassiveScenarioClassification (de)":61.4,"MassiveScenarioClassification (el)":60.68,"MassiveScenarioClassification (es)":64.61,"MassiveScenarioClassification (fa)":59.24,"MassiveScenarioClassification (fi)":54.66,"MassiveScenarioClassification (fr)":65.2,"MassiveScenarioClassification (he)":54.74,"MassiveScenarioClassification (hi)":55.99,"MassiveScenarioClassification (hu)":61.2,"MassiveScenarioClassification (hy)":49.63,"MassiveScenarioClassification (id)":65.25,"MassiveScenarioClassification (is)":22.6,"MassiveScenarioClassification (it)":64.63,"MassiveScenarioClassification (ja)":62.32,"MassiveScenarioClassification (jv)":35.77,"MassiveScenarioClassification (ka)":39.08,"MassiveScenarioClassification (km)":9.24,"MassiveScenarioClassification (kn)":8.28,"MassiveScenarioClassification (ko)":57.6,"MassiveScenarioClassification (lv)":51.72,"MassiveScenarioClassification (ml)":8.25,"MassiveScenarioClassification (mn)":47.21,"MassiveScenarioClassification (ms)":55.65,"MassiveScenarioClassification (my)":43.31,"MassiveScenarioClassification (nl)":67.49,"MassiveScenarioClassification (pt)":64.26,"MassiveScenarioClassification (ro)":58.03,"MassiveScenarioClassification (ru)":65.41,"MassiveScenarioClassification (sl)":59.36,"MassiveScenarioClassification (sq)":62.69,"MassiveScenarioClassification (sw)":25.12,"MassiveScenarioClassification (ta)":8.67,"MassiveScenarioClassification (te)":7.82,"MassiveScenarioClassification (th)":54.65,"MassiveScenarioClassification (tl)":36.09,"MassiveScenarioClassification (tr)":60.89,"MassiveScenarioClassification (ur)":54.71,"MassiveScenarioClassification (vi)":55.15,"MassiveScenarioClassification (zh-TW)":62.89} +{"Rank":3,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":42.45,"AmazonCounterfactualClassification (de)":67.82,"AmazonCounterfactualClassification (ja)":68.76,"AmazonReviewsClassification (de)":31.07,"AmazonReviewsClassification (es)":32.72,"AmazonReviewsClassification (fr)":31.12,"AmazonReviewsClassification (ja)":28.94,"AmazonReviewsClassification (zh)":30.89,"MTOPDomainClassification (de)":74.08,"MTOPDomainClassification (es)":73.47,"MTOPDomainClassification (fr)":72.26,"MTOPDomainClassification (hi)":72.95,"MTOPDomainClassification (th)":72.68,"MTOPIntentClassification (de)":51.62,"MTOPIntentClassification (es)":52.75,"MTOPIntentClassification (fr)":50.12,"MTOPIntentClassification (hi)":45.55,"MTOPIntentClassification 
(th)":50.07,"MassiveIntentClassification (af)":38.01,"MassiveIntentClassification (am)":12.7,"MassiveIntentClassification (ar)":37.16,"MassiveIntentClassification (az)":19.98,"MassiveIntentClassification (bn)":42.51,"MassiveIntentClassification (cy)":17.33,"MassiveIntentClassification (de)":44.79,"MassiveIntentClassification (el)":46.71,"MassiveIntentClassification (es)":45.44,"MassiveIntentClassification (fa)":45.01,"MassiveIntentClassification (fi)":45.94,"MassiveIntentClassification (fr)":46.13,"MassiveIntentClassification (he)":42.55,"MassiveIntentClassification (hi)":40.2,"MassiveIntentClassification (hu)":42.77,"MassiveIntentClassification (hy)":28.07,"MassiveIntentClassification (id)":45.81,"MassiveIntentClassification (is)":39.86,"MassiveIntentClassification (it)":48.25,"MassiveIntentClassification (ja)":45.3,"MassiveIntentClassification (jv)":24.3,"MassiveIntentClassification (ka)":22.7,"MassiveIntentClassification (km)":22.48,"MassiveIntentClassification (kn)":4.32,"MassiveIntentClassification (ko)":44.26,"MassiveIntentClassification (lv)":39.75,"MassiveIntentClassification (ml)":41.33,"MassiveIntentClassification (mn)":16.2,"MassiveIntentClassification (ms)":43.23,"MassiveIntentClassification (my)":25.37,"MassiveIntentClassification (nl)":45.0,"MassiveIntentClassification (pt)":48.55,"MassiveIntentClassification (ro)":44.3,"MassiveIntentClassification (ru)":44.29,"MassiveIntentClassification (sl)":44.72,"MassiveIntentClassification (sq)":46.12,"MassiveIntentClassification (sw)":31.89,"MassiveIntentClassification (ta)":29.63,"MassiveIntentClassification (te)":36.03,"MassiveIntentClassification (th)":43.39,"MassiveIntentClassification (tl)":29.73,"MassiveIntentClassification (tr)":43.93,"MassiveIntentClassification (ur)":26.11,"MassiveIntentClassification (vi)":44.33,"MassiveIntentClassification (zh-TW)":32.93,"MassiveScenarioClassification (af)":47.1,"MassiveScenarioClassification (am)":17.7,"MassiveScenarioClassification (ar)":45.21,"MassiveScenarioClassification (az)":28.21,"MassiveScenarioClassification (bn)":50.52,"MassiveScenarioClassification (cy)":22.58,"MassiveScenarioClassification (de)":54.34,"MassiveScenarioClassification (el)":55.47,"MassiveScenarioClassification (es)":52.77,"MassiveScenarioClassification (fa)":52.5,"MassiveScenarioClassification (fi)":52.63,"MassiveScenarioClassification (fr)":54.32,"MassiveScenarioClassification (he)":52.41,"MassiveScenarioClassification (hi)":47.37,"MassiveScenarioClassification (hu)":53.43,"MassiveScenarioClassification (hy)":33.57,"MassiveScenarioClassification (id)":54.38,"MassiveScenarioClassification (is)":49.78,"MassiveScenarioClassification (it)":54.84,"MassiveScenarioClassification (ja)":54.12,"MassiveScenarioClassification (jv)":32.71,"MassiveScenarioClassification (ka)":26.92,"MassiveScenarioClassification (km)":27.23,"MassiveScenarioClassification (kn)":10.06,"MassiveScenarioClassification (ko)":52.01,"MassiveScenarioClassification (lv)":44.82,"MassiveScenarioClassification (ml)":49.1,"MassiveScenarioClassification (mn)":21.51,"MassiveScenarioClassification (ms)":53.6,"MassiveScenarioClassification (my)":29.72,"MassiveScenarioClassification (nl)":53.33,"MassiveScenarioClassification (pt)":53.41,"MassiveScenarioClassification (ro)":50.48,"MassiveScenarioClassification (ru)":51.84,"MassiveScenarioClassification (sl)":51.29,"MassiveScenarioClassification (sq)":55.65,"MassiveScenarioClassification (sw)":42.04,"MassiveScenarioClassification (ta)":36.72,"MassiveScenarioClassification (te)":42.08,"MassiveScenarioClassification 
(th)":52.15,"MassiveScenarioClassification (tl)":37.34,"MassiveScenarioClassification (tr)":52.56,"MassiveScenarioClassification (ur)":32.6,"MassiveScenarioClassification (vi)":50.97,"MassiveScenarioClassification (zh-TW)":42.32} +{"Rank":4,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":31.93,"AmazonCounterfactualClassification (de)":57.1,"AmazonCounterfactualClassification (ja)":59.91,"AmazonReviewsClassification (de)":25.91,"AmazonReviewsClassification (es)":27.63,"AmazonReviewsClassification (fr)":27.54,"AmazonReviewsClassification (ja)":23.57,"AmazonReviewsClassification (zh)":22.99,"MTOPDomainClassification (de)":72.04,"MTOPDomainClassification (es)":72.99,"MTOPDomainClassification (fr)":75.59,"MTOPDomainClassification (hi)":40.36,"MTOPDomainClassification (th)":17.1,"MTOPIntentClassification (de)":43.41,"MTOPIntentClassification (es)":41.88,"MTOPIntentClassification (fr)":38.94,"MTOPIntentClassification (hi)":17.75,"MTOPIntentClassification (th)":5.63,"MassiveIntentClassification (af)":38.94,"MassiveIntentClassification (am)":2.45,"MassiveIntentClassification (ar)":20.94,"MassiveIntentClassification (az)":34.25,"MassiveIntentClassification (bn)":13.67,"MassiveIntentClassification (cy)":35.71,"MassiveIntentClassification (de)":44.17,"MassiveIntentClassification (el)":28.7,"MassiveIntentClassification (es)":40.91,"MassiveIntentClassification (fa)":23.52,"MassiveIntentClassification (fi)":39.27,"MassiveIntentClassification (fr)":44.82,"MassiveIntentClassification (he)":23.65,"MassiveIntentClassification (hi)":17.98,"MassiveIntentClassification (hu)":38.0,"MassiveIntentClassification (hy)":8.69,"MassiveIntentClassification (id)":39.66,"MassiveIntentClassification (is)":35.14,"MassiveIntentClassification (it)":43.17,"MassiveIntentClassification (ja)":30.94,"MassiveIntentClassification (jv)":36.69,"MassiveIntentClassification (ka)":9.17,"MassiveIntentClassification (km)":4.99,"MassiveIntentClassification (kn)":3.08,"MassiveIntentClassification (ko)":19.97,"MassiveIntentClassification (lv)":38.61,"MassiveIntentClassification (ml)":2.85,"MassiveIntentClassification (mn)":23.25,"MassiveIntentClassification (ms)":36.21,"MassiveIntentClassification (my)":4.38,"MassiveIntentClassification (nl)":41.85,"MassiveIntentClassification (pt)":45.12,"MassiveIntentClassification (ro)":41.71,"MassiveIntentClassification (ru)":26.33,"MassiveIntentClassification (sl)":38.52,"MassiveIntentClassification (sq)":41.62,"MassiveIntentClassification (sw)":35.28,"MassiveIntentClassification (ta)":13.1,"MassiveIntentClassification (te)":2.56,"MassiveIntentClassification (th)":10.54,"MassiveIntentClassification (tl)":38.56,"MassiveIntentClassification (tr)":35.9,"MassiveIntentClassification (ur)":16.18,"MassiveIntentClassification (vi)":37.38,"MassiveIntentClassification (zh-TW)":22.39,"MassiveScenarioClassification (af)":45.71,"MassiveScenarioClassification (am)":7.41,"MassiveScenarioClassification (ar)":27.62,"MassiveScenarioClassification (az)":39.58,"MassiveScenarioClassification (bn)":18.98,"MassiveScenarioClassification (cy)":41.4,"MassiveScenarioClassification (de)":52.07,"MassiveScenarioClassification (el)":35.51,"MassiveScenarioClassification (es)":50.74,"MassiveScenarioClassification (fa)":29.0,"MassiveScenarioClassification (fi)":45.8,"MassiveScenarioClassification (fr)":53.76,"MassiveScenarioClassification (he)":25.68,"MassiveScenarioClassification (hi)":23.02,"MassiveScenarioClassification (hu)":44.09,"MassiveScenarioClassification 
(hy)":14.83,"MassiveScenarioClassification (id)":44.35,"MassiveScenarioClassification (is)":43.08,"MassiveScenarioClassification (it)":51.71,"MassiveScenarioClassification (ja)":36.75,"MassiveScenarioClassification (jv)":44.57,"MassiveScenarioClassification (ka)":14.84,"MassiveScenarioClassification (km)":9.75,"MassiveScenarioClassification (kn)":8.32,"MassiveScenarioClassification (ko)":25.72,"MassiveScenarioClassification (lv)":42.75,"MassiveScenarioClassification (ml)":7.25,"MassiveScenarioClassification (mn)":29.03,"MassiveScenarioClassification (ms)":44.65,"MassiveScenarioClassification (my)":10.07,"MassiveScenarioClassification (nl)":49.15,"MassiveScenarioClassification (pt)":53.0,"MassiveScenarioClassification (ro)":49.97,"MassiveScenarioClassification (ru)":28.75,"MassiveScenarioClassification (sl)":42.26,"MassiveScenarioClassification (sq)":49.14,"MassiveScenarioClassification (sw)":43.18,"MassiveScenarioClassification (ta)":19.38,"MassiveScenarioClassification (te)":7.74,"MassiveScenarioClassification (th)":18.32,"MassiveScenarioClassification (tl)":48.31,"MassiveScenarioClassification (tr)":41.79,"MassiveScenarioClassification (ur)":24.46,"MassiveScenarioClassification (vi)":40.94,"MassiveScenarioClassification (zh-TW)":31.16} +{"Rank":5,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":30.51,"AmazonCounterfactualClassification (de)":59.79,"AmazonCounterfactualClassification (ja)":50.59,"AmazonReviewsClassification (de)":35.06,"AmazonReviewsClassification (es)":37.18,"AmazonReviewsClassification (fr)":35.48,"AmazonReviewsClassification (ja)":22.24,"AmazonReviewsClassification (zh)":21.89,"MTOPDomainClassification (de)":85.42,"MTOPDomainClassification (es)":88.2,"MTOPDomainClassification (fr)":85.05,"MTOPDomainClassification (hi)":21.74,"MTOPDomainClassification (th)":15.87,"MTOPIntentClassification (de)":55.75,"MTOPIntentClassification (es)":57.73,"MTOPIntentClassification (fr)":51.07,"MTOPIntentClassification (hi)":3.19,"MTOPIntentClassification (th)":5.55,"MassiveIntentClassification (af)":42.6,"MassiveIntentClassification (am)":2.12,"MassiveIntentClassification (ar)":4.64,"MassiveIntentClassification (az)":35.05,"MassiveIntentClassification (bn)":2.84,"MassiveIntentClassification (cy)":36.19,"MassiveIntentClassification (de)":55.49,"MassiveIntentClassification (el)":10.14,"MassiveIntentClassification (es)":56.72,"MassiveIntentClassification (fa)":3.54,"MassiveIntentClassification (fi)":37.13,"MassiveIntentClassification (fr)":57.67,"MassiveIntentClassification (he)":2.56,"MassiveIntentClassification (hi)":3.24,"MassiveIntentClassification (hu)":34.22,"MassiveIntentClassification (hy)":3.01,"MassiveIntentClassification (id)":46.54,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":54.13,"MassiveIntentClassification (ja)":4.27,"MassiveIntentClassification (jv)":36.97,"MassiveIntentClassification (ka)":2.72,"MassiveIntentClassification (km)":5.35,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":2.64,"MassiveIntentClassification (lv)":36.32,"MassiveIntentClassification (ml)":3.18,"MassiveIntentClassification (mn)":22.85,"MassiveIntentClassification (ms)":42.87,"MassiveIntentClassification (my)":4.04,"MassiveIntentClassification (nl)":49.53,"MassiveIntentClassification (pt)":57.03,"MassiveIntentClassification (ro)":49.95,"MassiveIntentClassification (ru)":36.58,"MassiveIntentClassification (sl)":39.44,"MassiveIntentClassification (sq)":41.78,"MassiveIntentClassification 
(sw)":35.85,"MassiveIntentClassification (ta)":2.32,"MassiveIntentClassification (te)":2.2,"MassiveIntentClassification (th)":3.74,"MassiveIntentClassification (tl)":43.12,"MassiveIntentClassification (tr)":35.24,"MassiveIntentClassification (ur)":3.0,"MassiveIntentClassification (vi)":30.01,"MassiveIntentClassification (zh-TW)":3.35,"MassiveScenarioClassification (af)":52.54,"MassiveScenarioClassification (am)":6.3,"MassiveScenarioClassification (ar)":11.96,"MassiveScenarioClassification (az)":40.17,"MassiveScenarioClassification (bn)":8.29,"MassiveScenarioClassification (cy)":42.24,"MassiveScenarioClassification (de)":68.09,"MassiveScenarioClassification (el)":16.66,"MassiveScenarioClassification (es)":64.32,"MassiveScenarioClassification (fa)":6.9,"MassiveScenarioClassification (fi)":43.96,"MassiveScenarioClassification (fr)":66.72,"MassiveScenarioClassification (he)":7.51,"MassiveScenarioClassification (hi)":7.82,"MassiveScenarioClassification (hu)":42.16,"MassiveScenarioClassification (hy)":9.33,"MassiveScenarioClassification (id)":53.54,"MassiveScenarioClassification (is)":42.84,"MassiveScenarioClassification (it)":62.44,"MassiveScenarioClassification (ja)":7.29,"MassiveScenarioClassification (jv)":43.13,"MassiveScenarioClassification (ka)":7.63,"MassiveScenarioClassification (km)":9.08,"MassiveScenarioClassification (kn)":8.1,"MassiveScenarioClassification (ko)":6.35,"MassiveScenarioClassification (lv)":40.24,"MassiveScenarioClassification (ml)":7.65,"MassiveScenarioClassification (mn)":27.98,"MassiveScenarioClassification (ms)":52.41,"MassiveScenarioClassification (my)":9.21,"MassiveScenarioClassification (nl)":60.35,"MassiveScenarioClassification (pt)":62.78,"MassiveScenarioClassification (ro)":59.62,"MassiveScenarioClassification (ru)":43.44,"MassiveScenarioClassification (sl)":44.79,"MassiveScenarioClassification (sq)":50.84,"MassiveScenarioClassification (sw)":44.63,"MassiveScenarioClassification (ta)":7.95,"MassiveScenarioClassification (te)":7.5,"MassiveScenarioClassification (th)":8.79,"MassiveScenarioClassification (tl)":53.54,"MassiveScenarioClassification (tr)":42.47,"MassiveScenarioClassification (ur)":9.58,"MassiveScenarioClassification (vi)":34.68,"MassiveScenarioClassification (zh-TW)":8.77} +{"Rank":6,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":29.6,"AmazonCounterfactualClassification (de)":67.01,"AmazonCounterfactualClassification (ja)":45.61,"AmazonReviewsClassification (de)":44.05,"AmazonReviewsClassification (es)":45.01,"AmazonReviewsClassification (fr)":43.52,"AmazonReviewsClassification (ja)":22.23,"AmazonReviewsClassification (zh)":21.88,"MTOPDomainClassification (de)":83.28,"MTOPDomainClassification (es)":85.32,"MTOPDomainClassification (fr)":85.14,"MTOPDomainClassification (hi)":20.85,"MTOPDomainClassification (th)":15.62,"MTOPIntentClassification (de)":54.65,"MTOPIntentClassification (es)":57.38,"MTOPIntentClassification (fr)":54.39,"MTOPIntentClassification (hi)":3.28,"MTOPIntentClassification (th)":5.08,"MassiveIntentClassification (af)":40.17,"MassiveIntentClassification (am)":2.18,"MassiveIntentClassification (ar)":4.18,"MassiveIntentClassification (az)":30.02,"MassiveIntentClassification (bn)":2.6,"MassiveIntentClassification (cy)":29.15,"MassiveIntentClassification (de)":57.43,"MassiveIntentClassification (el)":9.96,"MassiveIntentClassification (es)":57.97,"MassiveIntentClassification (fa)":3.6,"MassiveIntentClassification (fi)":34.02,"MassiveIntentClassification 
(fr)":60.99,"MassiveIntentClassification (he)":2.51,"MassiveIntentClassification (hi)":3.02,"MassiveIntentClassification (hu)":31.66,"MassiveIntentClassification (hy)":3.32,"MassiveIntentClassification (id)":41.53,"MassiveIntentClassification (is)":30.25,"MassiveIntentClassification (it)":56.57,"MassiveIntentClassification (ja)":3.5,"MassiveIntentClassification (jv)":31.67,"MassiveIntentClassification (ka)":2.79,"MassiveIntentClassification (km)":5.43,"MassiveIntentClassification (kn)":2.79,"MassiveIntentClassification (ko)":2.67,"MassiveIntentClassification (lv)":34.25,"MassiveIntentClassification (ml)":2.98,"MassiveIntentClassification (mn)":20.99,"MassiveIntentClassification (ms)":37.43,"MassiveIntentClassification (my)":4.02,"MassiveIntentClassification (nl)":50.51,"MassiveIntentClassification (pt)":57.95,"MassiveIntentClassification (ro)":49.37,"MassiveIntentClassification (ru)":33.46,"MassiveIntentClassification (sl)":36.33,"MassiveIntentClassification (sq)":37.65,"MassiveIntentClassification (sw)":30.6,"MassiveIntentClassification (ta)":1.79,"MassiveIntentClassification (te)":2.26,"MassiveIntentClassification (th)":4.02,"MassiveIntentClassification (tl)":38.92,"MassiveIntentClassification (tr)":32.05,"MassiveIntentClassification (ur)":2.7,"MassiveIntentClassification (vi)":21.47,"MassiveIntentClassification (zh-TW)":3.24,"MassiveScenarioClassification (af)":50.81,"MassiveScenarioClassification (am)":6.95,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.79,"MassiveScenarioClassification (bn)":8.0,"MassiveScenarioClassification (cy)":33.91,"MassiveScenarioClassification (de)":65.33,"MassiveScenarioClassification (el)":16.89,"MassiveScenarioClassification (es)":62.52,"MassiveScenarioClassification (fa)":6.08,"MassiveScenarioClassification (fi)":43.34,"MassiveScenarioClassification (fr)":66.42,"MassiveScenarioClassification (he)":7.55,"MassiveScenarioClassification (hi)":7.44,"MassiveScenarioClassification (hu)":40.85,"MassiveScenarioClassification (hy)":9.25,"MassiveScenarioClassification (id)":51.92,"MassiveScenarioClassification (is)":40.09,"MassiveScenarioClassification (it)":62.94,"MassiveScenarioClassification (ja)":7.9,"MassiveScenarioClassification (jv)":41.33,"MassiveScenarioClassification (ka)":7.76,"MassiveScenarioClassification (km)":9.19,"MassiveScenarioClassification (kn)":8.36,"MassiveScenarioClassification (ko)":6.13,"MassiveScenarioClassification (lv)":40.7,"MassiveScenarioClassification (ml)":6.98,"MassiveScenarioClassification (mn)":27.0,"MassiveScenarioClassification (ms)":46.9,"MassiveScenarioClassification (my)":9.55,"MassiveScenarioClassification (nl)":59.65,"MassiveScenarioClassification (pt)":62.18,"MassiveScenarioClassification (ro)":58.22,"MassiveScenarioClassification (ru)":40.73,"MassiveScenarioClassification (sl)":43.66,"MassiveScenarioClassification (sq)":49.25,"MassiveScenarioClassification (sw)":40.55,"MassiveScenarioClassification (ta)":7.46,"MassiveScenarioClassification (te)":7.03,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":51.74,"MassiveScenarioClassification (tr)":43.01,"MassiveScenarioClassification (ur)":9.61,"MassiveScenarioClassification (vi)":28.91,"MassiveScenarioClassification (zh-TW)":7.14} +{"Rank":7,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.9,"AmazonCounterfactualClassification (de)":59.38,"AmazonCounterfactualClassification (ja)":45.87,"AmazonReviewsClassification (de)":33.06,"AmazonReviewsClassification 
(es)":34.0,"AmazonReviewsClassification (fr)":33.48,"AmazonReviewsClassification (ja)":21.78,"AmazonReviewsClassification (zh)":21.83,"MTOPDomainClassification (de)":81.91,"MTOPDomainClassification (es)":84.7,"MTOPDomainClassification (fr)":82.48,"MTOPDomainClassification (hi)":22.11,"MTOPDomainClassification (th)":16.36,"MTOPIntentClassification (de)":52.13,"MTOPIntentClassification (es)":52.62,"MTOPIntentClassification (fr)":46.39,"MTOPIntentClassification (hi)":3.9,"MTOPIntentClassification (th)":5.38,"MassiveIntentClassification (af)":41.02,"MassiveIntentClassification (am)":2.34,"MassiveIntentClassification (ar)":4.87,"MassiveIntentClassification (az)":34.92,"MassiveIntentClassification (bn)":2.52,"MassiveIntentClassification (cy)":35.87,"MassiveIntentClassification (de)":51.48,"MassiveIntentClassification (el)":10.0,"MassiveIntentClassification (es)":53.3,"MassiveIntentClassification (fa)":3.59,"MassiveIntentClassification (fi)":37.35,"MassiveIntentClassification (fr)":54.83,"MassiveIntentClassification (he)":2.52,"MassiveIntentClassification (hi)":2.88,"MassiveIntentClassification (hu)":33.52,"MassiveIntentClassification (hy)":3.13,"MassiveIntentClassification (id)":40.11,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":51.21,"MassiveIntentClassification (ja)":4.75,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.48,"MassiveIntentClassification (kn)":2.44,"MassiveIntentClassification (ko)":2.59,"MassiveIntentClassification (lv)":38.15,"MassiveIntentClassification (ml)":2.67,"MassiveIntentClassification (mn)":18.47,"MassiveIntentClassification (ms)":35.58,"MassiveIntentClassification (my)":4.35,"MassiveIntentClassification (nl)":45.96,"MassiveIntentClassification (pt)":52.27,"MassiveIntentClassification (ro)":46.39,"MassiveIntentClassification (ru)":16.82,"MassiveIntentClassification (sl)":37.3,"MassiveIntentClassification (sq)":41.73,"MassiveIntentClassification (sw)":35.97,"MassiveIntentClassification (ta)":1.52,"MassiveIntentClassification (te)":2.57,"MassiveIntentClassification (th)":3.94,"MassiveIntentClassification (tl)":41.03,"MassiveIntentClassification (tr)":33.75,"MassiveIntentClassification (ur)":2.57,"MassiveIntentClassification (vi)":25.23,"MassiveIntentClassification (zh-TW)":4.64,"MassiveScenarioClassification (af)":51.48,"MassiveScenarioClassification (am)":7.74,"MassiveScenarioClassification (ar)":12.03,"MassiveScenarioClassification (az)":41.77,"MassiveScenarioClassification (bn)":8.07,"MassiveScenarioClassification (cy)":43.67,"MassiveScenarioClassification (de)":63.63,"MassiveScenarioClassification (el)":16.83,"MassiveScenarioClassification (es)":61.48,"MassiveScenarioClassification (fa)":6.48,"MassiveScenarioClassification (fi)":43.54,"MassiveScenarioClassification (fr)":64.06,"MassiveScenarioClassification (he)":8.03,"MassiveScenarioClassification (hi)":7.5,"MassiveScenarioClassification (hu)":42.59,"MassiveScenarioClassification (hy)":9.22,"MassiveScenarioClassification (id)":48.67,"MassiveScenarioClassification (is)":43.87,"MassiveScenarioClassification (it)":59.83,"MassiveScenarioClassification (ja)":5.62,"MassiveScenarioClassification (jv)":42.18,"MassiveScenarioClassification (ka)":7.52,"MassiveScenarioClassification (km)":9.55,"MassiveScenarioClassification (kn)":8.34,"MassiveScenarioClassification (ko)":6.11,"MassiveScenarioClassification (lv)":43.35,"MassiveScenarioClassification (ml)":7.28,"MassiveScenarioClassification 
(mn)":23.94,"MassiveScenarioClassification (ms)":45.18,"MassiveScenarioClassification (my)":9.33,"MassiveScenarioClassification (nl)":57.02,"MassiveScenarioClassification (pt)":59.45,"MassiveScenarioClassification (ro)":56.8,"MassiveScenarioClassification (ru)":25.85,"MassiveScenarioClassification (sl)":42.51,"MassiveScenarioClassification (sq)":50.41,"MassiveScenarioClassification (sw)":43.02,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.9,"MassiveScenarioClassification (th)":8.7,"MassiveScenarioClassification (tl)":51.76,"MassiveScenarioClassification (tr)":42.54,"MassiveScenarioClassification (ur)":9.32,"MassiveScenarioClassification (vi)":31.51,"MassiveScenarioClassification (zh-TW)":8.16} +{"Rank":8,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.26,"AmazonCounterfactualClassification (de)":67.97,"AmazonCounterfactualClassification (ja)":45.72,"AmazonReviewsClassification (de)":43.16,"AmazonReviewsClassification (es)":42.89,"AmazonReviewsClassification (fr)":41.48,"AmazonReviewsClassification (ja)":22.49,"AmazonReviewsClassification (zh)":22.12,"MTOPDomainClassification (de)":80.56,"MTOPDomainClassification (es)":80.78,"MTOPDomainClassification (fr)":79.6,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":15.82,"MTOPIntentClassification (de)":52.5,"MTOPIntentClassification (es)":52.07,"MTOPIntentClassification (fr)":47.73,"MTOPIntentClassification (hi)":3.74,"MTOPIntentClassification (th)":4.96,"MassiveIntentClassification (af)":38.41,"MassiveIntentClassification (am)":2.49,"MassiveIntentClassification (ar)":4.7,"MassiveIntentClassification (az)":31.77,"MassiveIntentClassification (bn)":2.77,"MassiveIntentClassification (cy)":31.69,"MassiveIntentClassification (de)":52.01,"MassiveIntentClassification (el)":9.74,"MassiveIntentClassification (es)":54.1,"MassiveIntentClassification (fa)":3.86,"MassiveIntentClassification (fi)":34.07,"MassiveIntentClassification (fr)":57.01,"MassiveIntentClassification (he)":2.14,"MassiveIntentClassification (hi)":2.97,"MassiveIntentClassification (hu)":32.01,"MassiveIntentClassification (hy)":3.17,"MassiveIntentClassification (id)":34.55,"MassiveIntentClassification (is)":32.0,"MassiveIntentClassification (it)":52.94,"MassiveIntentClassification (ja)":2.9,"MassiveIntentClassification (jv)":32.42,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.5,"MassiveIntentClassification (kn)":2.41,"MassiveIntentClassification (ko)":2.57,"MassiveIntentClassification (lv)":35.09,"MassiveIntentClassification (ml)":2.95,"MassiveIntentClassification (mn)":18.33,"MassiveIntentClassification (ms)":29.69,"MassiveIntentClassification (my)":3.99,"MassiveIntentClassification (nl)":44.95,"MassiveIntentClassification (pt)":51.96,"MassiveIntentClassification (ro)":43.83,"MassiveIntentClassification (ru)":17.32,"MassiveIntentClassification (sl)":33.71,"MassiveIntentClassification (sq)":37.62,"MassiveIntentClassification (sw)":31.9,"MassiveIntentClassification (ta)":1.91,"MassiveIntentClassification (te)":2.54,"MassiveIntentClassification (th)":3.85,"MassiveIntentClassification (tl)":36.83,"MassiveIntentClassification (tr)":33.0,"MassiveIntentClassification (ur)":2.62,"MassiveIntentClassification (vi)":22.81,"MassiveIntentClassification (zh-TW)":3.49,"MassiveScenarioClassification (af)":50.28,"MassiveScenarioClassification (am)":7.15,"MassiveScenarioClassification (ar)":12.12,"MassiveScenarioClassification 
(az)":39.68,"MassiveScenarioClassification (bn)":8.06,"MassiveScenarioClassification (cy)":38.01,"MassiveScenarioClassification (de)":62.71,"MassiveScenarioClassification (el)":17.19,"MassiveScenarioClassification (es)":59.56,"MassiveScenarioClassification (fa)":6.5,"MassiveScenarioClassification (fi)":41.72,"MassiveScenarioClassification (fr)":63.6,"MassiveScenarioClassification (he)":7.93,"MassiveScenarioClassification (hi)":7.85,"MassiveScenarioClassification (hu)":41.37,"MassiveScenarioClassification (hy)":9.42,"MassiveScenarioClassification (id)":44.88,"MassiveScenarioClassification (is)":40.86,"MassiveScenarioClassification (it)":60.09,"MassiveScenarioClassification (ja)":6.56,"MassiveScenarioClassification (jv)":40.18,"MassiveScenarioClassification (ka)":7.37,"MassiveScenarioClassification (km)":9.56,"MassiveScenarioClassification (kn)":8.4,"MassiveScenarioClassification (ko)":5.96,"MassiveScenarioClassification (lv)":41.44,"MassiveScenarioClassification (ml)":7.47,"MassiveScenarioClassification (mn)":25.36,"MassiveScenarioClassification (ms)":39.69,"MassiveScenarioClassification (my)":9.68,"MassiveScenarioClassification (nl)":56.09,"MassiveScenarioClassification (pt)":57.99,"MassiveScenarioClassification (ro)":56.0,"MassiveScenarioClassification (ru)":27.47,"MassiveScenarioClassification (sl)":41.04,"MassiveScenarioClassification (sq)":49.38,"MassiveScenarioClassification (sw)":40.62,"MassiveScenarioClassification (ta)":7.59,"MassiveScenarioClassification (te)":7.07,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":49.89,"MassiveScenarioClassification (tr)":43.08,"MassiveScenarioClassification (ur)":9.31,"MassiveScenarioClassification (vi)":27.46,"MassiveScenarioClassification (zh-TW)":7.24} +{"Rank":9,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.73,"AmazonCounterfactualClassification (de)":69.98,"AmazonCounterfactualClassification (ja)":46.05,"AmazonReviewsClassification (de)":37.9,"AmazonReviewsClassification (es)":37.33,"AmazonReviewsClassification (fr)":37.35,"AmazonReviewsClassification (ja)":22.29,"AmazonReviewsClassification (zh)":21.53,"MTOPDomainClassification (de)":76.98,"MTOPDomainClassification (es)":73.61,"MTOPDomainClassification (fr)":75.03,"MTOPDomainClassification (hi)":21.4,"MTOPDomainClassification (th)":16.21,"MTOPIntentClassification (de)":44.43,"MTOPIntentClassification (es)":42.03,"MTOPIntentClassification (fr)":43.85,"MTOPIntentClassification (hi)":3.8,"MTOPIntentClassification (th)":5.21,"MassiveIntentClassification (af)":34.32,"MassiveIntentClassification (am)":2.38,"MassiveIntentClassification (ar)":4.53,"MassiveIntentClassification (az)":31.76,"MassiveIntentClassification (bn)":2.58,"MassiveIntentClassification (cy)":28.94,"MassiveIntentClassification (de)":45.23,"MassiveIntentClassification (el)":10.05,"MassiveIntentClassification (es)":45.32,"MassiveIntentClassification (fa)":3.58,"MassiveIntentClassification (fi)":33.52,"MassiveIntentClassification (fr)":51.13,"MassiveIntentClassification (he)":2.63,"MassiveIntentClassification (hi)":2.68,"MassiveIntentClassification (hu)":32.31,"MassiveIntentClassification (hy)":3.33,"MassiveIntentClassification (id)":35.5,"MassiveIntentClassification (is)":29.82,"MassiveIntentClassification (it)":45.59,"MassiveIntentClassification (ja)":3.67,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":2.77,"MassiveIntentClassification (km)":5.66,"MassiveIntentClassification 
(kn)":2.59,"MassiveIntentClassification (ko)":2.34,"MassiveIntentClassification (lv)":33.97,"MassiveIntentClassification (ml)":2.55,"MassiveIntentClassification (mn)":14.7,"MassiveIntentClassification (ms)":33.12,"MassiveIntentClassification (my)":4.42,"MassiveIntentClassification (nl)":37.96,"MassiveIntentClassification (pt)":43.35,"MassiveIntentClassification (ro)":42.69,"MassiveIntentClassification (ru)":14.82,"MassiveIntentClassification (sl)":34.54,"MassiveIntentClassification (sq)":38.54,"MassiveIntentClassification (sw)":32.14,"MassiveIntentClassification (ta)":1.41,"MassiveIntentClassification (te)":2.5,"MassiveIntentClassification (th)":3.71,"MassiveIntentClassification (tl)":36.04,"MassiveIntentClassification (tr)":33.77,"MassiveIntentClassification (ur)":2.99,"MassiveIntentClassification (vi)":22.62,"MassiveIntentClassification (zh-TW)":4.63,"MassiveScenarioClassification (af)":44.45,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.41,"MassiveScenarioClassification (bn)":8.45,"MassiveScenarioClassification (cy)":35.04,"MassiveScenarioClassification (de)":59.12,"MassiveScenarioClassification (el)":17.68,"MassiveScenarioClassification (es)":55.61,"MassiveScenarioClassification (fa)":6.86,"MassiveScenarioClassification (fi)":41.34,"MassiveScenarioClassification (fr)":59.92,"MassiveScenarioClassification (he)":7.86,"MassiveScenarioClassification (hi)":7.63,"MassiveScenarioClassification (hu)":41.31,"MassiveScenarioClassification (hy)":9.23,"MassiveScenarioClassification (id)":44.64,"MassiveScenarioClassification (is)":39.63,"MassiveScenarioClassification (it)":54.58,"MassiveScenarioClassification (ja)":4.96,"MassiveScenarioClassification (jv)":40.73,"MassiveScenarioClassification (ka)":7.51,"MassiveScenarioClassification (km)":8.73,"MassiveScenarioClassification (kn)":7.99,"MassiveScenarioClassification (ko)":6.03,"MassiveScenarioClassification (lv)":36.42,"MassiveScenarioClassification (ml)":6.96,"MassiveScenarioClassification (mn)":19.85,"MassiveScenarioClassification (ms)":43.18,"MassiveScenarioClassification (my)":9.46,"MassiveScenarioClassification (nl)":50.0,"MassiveScenarioClassification (pt)":52.24,"MassiveScenarioClassification (ro)":53.7,"MassiveScenarioClassification (ru)":20.69,"MassiveScenarioClassification (sl)":39.79,"MassiveScenarioClassification (sq)":50.16,"MassiveScenarioClassification (sw)":40.48,"MassiveScenarioClassification (ta)":7.47,"MassiveScenarioClassification (te)":6.87,"MassiveScenarioClassification (th)":8.26,"MassiveScenarioClassification (tl)":48.94,"MassiveScenarioClassification (tr)":41.83,"MassiveScenarioClassification (ur)":9.77,"MassiveScenarioClassification (vi)":30.01,"MassiveScenarioClassification (zh-TW)":7.91} +{"Rank":10,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":24.49,"AmazonCounterfactualClassification (de)":54.46,"AmazonCounterfactualClassification (ja)":43.87,"AmazonReviewsClassification (de)":24.08,"AmazonReviewsClassification (es)":23.88,"AmazonReviewsClassification (fr)":23.31,"AmazonReviewsClassification (ja)":20.25,"AmazonReviewsClassification (zh)":20.49,"MTOPDomainClassification (de)":48.55,"MTOPDomainClassification (es)":58.39,"MTOPDomainClassification (fr)":54.61,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":14.98,"MTOPIntentClassification (de)":35.55,"MTOPIntentClassification (es)":36.72,"MTOPIntentClassification (fr)":34.71,"MTOPIntentClassification 
(hi)":4.44,"MTOPIntentClassification (th)":4.67,"MassiveIntentClassification (af)":33.68,"MassiveIntentClassification (am)":2.94,"MassiveIntentClassification (ar)":10.04,"MassiveIntentClassification (az)":30.74,"MassiveIntentClassification (bn)":3.02,"MassiveIntentClassification (cy)":33.94,"MassiveIntentClassification (de)":36.06,"MassiveIntentClassification (el)":27.7,"MassiveIntentClassification (es)":35.6,"MassiveIntentClassification (fa)":17.97,"MassiveIntentClassification (fi)":35.53,"MassiveIntentClassification (fr)":38.41,"MassiveIntentClassification (he)":2.69,"MassiveIntentClassification (hi)":3.43,"MassiveIntentClassification (hu)":34.05,"MassiveIntentClassification (hy)":3.11,"MassiveIntentClassification (id)":40.02,"MassiveIntentClassification (is)":32.63,"MassiveIntentClassification (it)":39.28,"MassiveIntentClassification (ja)":4.95,"MassiveIntentClassification (jv)":34.95,"MassiveIntentClassification (ka)":2.57,"MassiveIntentClassification (km)":4.73,"MassiveIntentClassification (kn)":3.54,"MassiveIntentClassification (ko)":2.68,"MassiveIntentClassification (lv)":37.91,"MassiveIntentClassification (ml)":2.88,"MassiveIntentClassification (mn)":16.94,"MassiveIntentClassification (ms)":36.6,"MassiveIntentClassification (my)":3.96,"MassiveIntentClassification (nl)":33.95,"MassiveIntentClassification (pt)":43.05,"MassiveIntentClassification (ro)":36.2,"MassiveIntentClassification (ru)":25.3,"MassiveIntentClassification (sl)":35.9,"MassiveIntentClassification (sq)":36.6,"MassiveIntentClassification (sw)":34.81,"MassiveIntentClassification (ta)":3.11,"MassiveIntentClassification (te)":2.53,"MassiveIntentClassification (th)":4.38,"MassiveIntentClassification (tl)":35.51,"MassiveIntentClassification (tr)":32.02,"MassiveIntentClassification (ur)":9.61,"MassiveIntentClassification (vi)":37.07,"MassiveIntentClassification (zh-TW)":4.79,"MassiveScenarioClassification (af)":36.17,"MassiveScenarioClassification (am)":7.64,"MassiveScenarioClassification (ar)":15.26,"MassiveScenarioClassification (az)":30.73,"MassiveScenarioClassification (bn)":7.15,"MassiveScenarioClassification (cy)":34.73,"MassiveScenarioClassification (de)":38.62,"MassiveScenarioClassification (el)":27.18,"MassiveScenarioClassification (es)":39.44,"MassiveScenarioClassification (fa)":21.43,"MassiveScenarioClassification (fi)":33.21,"MassiveScenarioClassification (fr)":40.26,"MassiveScenarioClassification (he)":7.42,"MassiveScenarioClassification (hi)":8.06,"MassiveScenarioClassification (hu)":34.54,"MassiveScenarioClassification (hy)":8.61,"MassiveScenarioClassification (id)":40.04,"MassiveScenarioClassification (is)":33.57,"MassiveScenarioClassification (it)":40.1,"MassiveScenarioClassification (ja)":9.96,"MassiveScenarioClassification (jv)":36.11,"MassiveScenarioClassification (ka)":7.13,"MassiveScenarioClassification (km)":9.66,"MassiveScenarioClassification (kn)":7.55,"MassiveScenarioClassification (ko)":7.27,"MassiveScenarioClassification (lv)":37.03,"MassiveScenarioClassification (ml)":7.22,"MassiveScenarioClassification (mn)":21.53,"MassiveScenarioClassification (ms)":37.57,"MassiveScenarioClassification (my)":9.54,"MassiveScenarioClassification (nl)":34.62,"MassiveScenarioClassification (pt)":44.68,"MassiveScenarioClassification (ro)":37.29,"MassiveScenarioClassification (ru)":28.16,"MassiveScenarioClassification (sl)":37.95,"MassiveScenarioClassification (sq)":37.82,"MassiveScenarioClassification (sw)":35.37,"MassiveScenarioClassification (ta)":7.19,"MassiveScenarioClassification 
(te)":7.29,"MassiveScenarioClassification (th)":9.47,"MassiveScenarioClassification (tl)":37.31,"MassiveScenarioClassification (tr)":34.57,"MassiveScenarioClassification (ur)":16.17,"MassiveScenarioClassification (vi)":35.91,"MassiveScenarioClassification (zh-TW)":10.19} +{"Rank":11,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.59,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.05,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":66.09,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.83,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.71,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification 
(ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":12,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.26,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":79.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":45.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":53.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification 
(am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":62.46,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":13,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.15,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.68,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":63.08,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification 
(mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.15,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":14,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.98,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.12,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification 
(el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":15,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification 
(fr)":43.36,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.52,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":68.06,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.18,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification 
(hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":52.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification 
(tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.15,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification 
(km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification 
(ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, 
fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.38,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification 
(ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":21,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":35.91,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification 
(fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":22,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":38.6,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":80.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":50.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.31,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification 
(sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":23,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.23,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":61.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification 
(hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":24,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":33.77,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification 
(th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification 
(tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":25,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.38,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.65,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.87,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification 
(ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":26,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification 
(ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":27,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.63,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.86,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification 
(mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":28,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.79,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.12,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification 
(el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":59.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":65.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":29,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification 
(es)":"","AmazonReviewsClassification (fr)":36.03,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":77.1,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":43.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.59,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":61.28,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification 
(ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":30,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.97,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification 
(hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":31,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":24.9,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":25.55,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.49,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.98,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification 
(tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":11.41,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":32,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":23.52,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":27.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":8.61,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.24,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification 
(jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":10.98,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":33,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":22.45,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":24.27,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.79,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification 
(af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":16.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":22.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} 
+{"Rank":34,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.61,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.84,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification 
(lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":35,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.02,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":64.49,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":39.4,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":38.01,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification 
(de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":43.63,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":36,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.71,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":74.8,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.97,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":46.39,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":53.86,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":37,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":40.94,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":84.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":55.51,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification 
(fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":38,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.91,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification 
(es)":"","MTOPDomainClassification (fr)":86.41,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification 
(ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":39,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":39.68,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":81.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":46.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification 
(it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":40,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.12,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":69.24,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":51.25,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":43.21,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification 
(zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":41,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.85,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":34.99,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.09,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification 
(ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":21.67,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":42,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.02,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification 
(cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":43,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification 
(de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification 
(ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":44,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":27.05,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":72.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.18,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":42.64,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification 
(hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":45,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":68.35,"AmazonCounterfactualClassification (ja)":63.45,"AmazonReviewsClassification (de)":35.91,"AmazonReviewsClassification (es)":37.49,"AmazonReviewsClassification (fr)":35.3,"AmazonReviewsClassification (ja)":33.24,"AmazonReviewsClassification (zh)":35.26,"MTOPDomainClassification (de)":79.2,"MTOPDomainClassification (es)":83.04,"MTOPDomainClassification (fr)":78.63,"MTOPDomainClassification (hi)":81.36,"MTOPDomainClassification (th)":79.99,"MTOPIntentClassification (de)":54.23,"MTOPIntentClassification (es)":60.28,"MTOPIntentClassification (fr)":54.05,"MTOPIntentClassification (hi)":59.9,"MTOPIntentClassification (th)":61.96,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":57.52,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification 
(te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":64.52,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":46,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":69.95,"AmazonCounterfactualClassification (ja)":69.79,"AmazonReviewsClassification (de)":39.52,"AmazonReviewsClassification (es)":39.99,"AmazonReviewsClassification (fr)":39.0,"AmazonReviewsClassification (ja)":36.64,"AmazonReviewsClassification (zh)":37.74,"MTOPDomainClassification (de)":85.73,"MTOPDomainClassification (es)":86.96,"MTOPDomainClassification (fr)":81.21,"MTOPDomainClassification (hi)":84.76,"MTOPDomainClassification (th)":82.51,"MTOPIntentClassification (de)":61.27,"MTOPIntentClassification (es)":66.59,"MTOPIntentClassification (fr)":59.76,"MTOPIntentClassification (hi)":62.37,"MTOPIntentClassification (th)":64.8,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.88,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification 
(is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.9,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":47,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":46.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification 
(fr)":58.33,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.91,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":68.53,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification 
(vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":48,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.12,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification 
(ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":49,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.46,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification 
(cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":50,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.25,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":71.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":44.53,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.93,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":58.31,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":51,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":33.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.5,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.98,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification 
(fr)":61.19,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.22,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":52,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification 
(de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.64,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.8,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":73.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":53,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.75,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":43.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":19.38,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":13.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.21,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification 
(is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":54,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":36.77,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.37,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.82,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification 
(vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":55,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.76,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.38,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":64.45,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.42,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification 
(kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.11,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} +{"Rank":56,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification 
(az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""} diff --git a/boards_data/other-sts/data_tasks/STS/default.jsonl b/boards_data/other-sts/data_tasks/STS/default.jsonl index 
d109775affad1d9b9e970596cd04ddec62179a9d..0e6bc0c5f1e3bcc7b8685490bf76f82f8745df46 100644 --- a/boards_data/other-sts/data_tasks/STS/default.jsonl +++ b/boards_data/other-sts/data_tasks/STS/default.jsonl @@ -1,269 +1,101 @@ -{"level_0":0,"index":147,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":73.17,"STS17 (ar-ar)":81.87,"STS17 (en-ar)":77.93,"STS17 (en-de)":87.3,"STS17 (en-tr)":72.56,"STS17 (es-en)":88.24,"STS17 (es-es)":87.46,"STS17 (fr-en)":88.06,"STS17 (it-en)":89.68,"STS17 (ko-ko)":83.69,"STS17 (nl-en)":88.25,"STS22 (ar)":54.12,"STS22 (de)":49.12,"STS22 (de-en)":60.92,"STS22 (de-fr)":61.39,"STS22 (de-pl)":54.47,"STS22 (es)":67.0,"STS22 (es-en)":75.84,"STS22 (es-it)":75.04,"STS22 (fr)":69.82,"STS22 (fr-pl)":84.52,"STS22 (it)":75.87,"STS22 (pl)":39.21,"STS22 (pl-en)":73.18,"STS22 (ru)":60.83,"STS22 (tr)":68.72,"STS22 (zh-en)":71.88,"STSBenchmark":88.6} -{"level_0":1,"index":151,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.66,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29} -{"level_0":2,"index":103,"Rank":3,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.85,"STS17 (ar-ar)":79.38,"STS17 (en-ar)":58.76,"STS17 (en-de)":76.13,"STS17 (en-tr)":55.53,"STS17 (es-en)":72.26,"STS17 (es-es)":85.06,"STS17 (fr-en)":75.63,"STS17 (it-en)":71.36,"STS17 (ko-ko)":80.79,"STS17 (nl-en)":71.99,"STS22 (ar)":57.44,"STS22 (de)":60.12,"STS22 (de-en)":53.36,"STS22 (de-fr)":58.25,"STS22 (de-pl)":48.47,"STS22 (es)":68.57,"STS22 (es-en)":77.41,"STS22 (es-it)":74.69,"STS22 (fr)":81.47,"STS22 (fr-pl)":73.25,"STS22 (it)":79.28,"STS22 (pl)":42.08,"STS22 (pl-en)":77.5,"STS22 (ru)":61.71,"STS22 (tr)":66.62,"STS22 (zh-en)":69.87,"STSBenchmark":81.95} -{"level_0":3,"index":150,"Rank":4,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.84,"STS17 (ar-ar)":74.52,"STS17 (en-ar)":71.27,"STS17 (en-de)":82.09,"STS17 (en-tr)":63.33,"STS17 (es-en)":76.5,"STS17 (es-es)":86.74,"STS17 (fr-en)":80.18,"STS17 (it-en)":80.15,"STS17 (ko-ko)":79.95,"STS17 (nl-en)":79.25,"STS22 (ar)":57.87,"STS22 (de)":55.95,"STS22 (de-en)":54.93,"STS22 (de-fr)":59.47,"STS22 (de-pl)":39.35,"STS22 (es)":66.58,"STS22 (es-en)":73.99,"STS22 (es-it)":66.46,"STS22 (fr)":74.8,"STS22 (fr-pl)":73.25,"STS22 (it)":77.76,"STS22 (pl)":34.07,"STS22 (pl-en)":70.37,"STS22 (ru)":60.66,"STS22 (tr)":63.7,"STS22 (zh-en)":69.92,"STSBenchmark":85.64} -{"level_0":4,"index":153,"Rank":5,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":67.07,"STS17 (ar-ar)":73.03,"STS17 (en-ar)":57.41,"STS17 (en-de)":77.24,"STS17 (en-tr)":55.97,"STS17 (es-en)":72.44,"STS17 (es-es)":84.84,"STS17 (fr-en)":72.29,"STS17 (it-en)":77.33,"STS17 (ko-ko)":78.87,"STS17 (nl-en)":75.38,"STS22 (ar)":56.65,"STS22 (de)":53.45,"STS22 
(de-en)":56.49,"STS22 (de-fr)":60.57,"STS22 (de-pl)":28.24,"STS22 (es)":66.88,"STS22 (es-en)":74.57,"STS22 (es-it)":71.81,"STS22 (fr)":76.58,"STS22 (fr-pl)":84.52,"STS22 (it)":76.53,"STS22 (pl)":35.8,"STS22 (pl-en)":72.69,"STS22 (ru)":59.9,"STS22 (tr)":63.71,"STS22 (zh-en)":63.74,"STSBenchmark":84.01} -{"level_0":5,"index":217,"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":66.01,"STS17 (ar-ar)":69.07,"STS17 (en-ar)":74.51,"STS17 (en-de)":73.85,"STS17 (en-tr)":72.07,"STS17 (es-en)":65.71,"STS17 (es-es)":80.83,"STS17 (fr-en)":76.98,"STS17 (it-en)":76.99,"STS17 (ko-ko)":71.32,"STS17 (nl-en)":75.22,"STS22 (ar)":57.67,"STS22 (de)":48.58,"STS22 (de-en)":50.14,"STS22 (de-fr)":53.28,"STS22 (de-pl)":58.69,"STS22 (es)":63.18,"STS22 (es-en)":71.86,"STS22 (es-it)":69.69,"STS22 (fr)":77.95,"STS22 (fr-pl)":61.98,"STS22 (it)":72.22,"STS22 (pl)":39.28,"STS22 (pl-en)":69.41,"STS22 (ru)":57.49,"STS22 (tr)":58.15,"STS22 (zh-en)":64.02,"STSBenchmark":72.25} -{"level_0":6,"index":238,"Rank":7,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.66,"STS17 (ar-ar)":78.03,"STS17 (en-ar)":78.6,"STS17 (en-de)":81.48,"STS17 (en-tr)":76.34,"STS17 (es-en)":81.81,"STS17 (es-es)":87.91,"STS17 (fr-en)":78.06,"STS17 (it-en)":80.98,"STS17 (ko-ko)":68.24,"STS17 (nl-en)":81.0,"STS22 (ar)":54.51,"STS22 (de)":46.89,"STS22 (de-en)":45.0,"STS22 (de-fr)":49.43,"STS22 (de-pl)":39.32,"STS22 (es)":58.94,"STS22 (es-en)":67.71,"STS22 (es-it)":50.79,"STS22 (fr)":74.1,"STS22 (fr-pl)":73.25,"STS22 (it)":65.86,"STS22 (pl)":34.81,"STS22 (pl-en)":60.17,"STS22 (ru)":54.51,"STS22 (tr)":57.29,"STS22 (zh-en)":61.29,"STSBenchmark":86.45} -{"level_0":7,"index":55,"Rank":8,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.52,"STS17 (ar-ar)":76.04,"STS17 (en-ar)":77.6,"STS17 (en-de)":70.4,"STS17 (en-tr)":71.04,"STS17 (es-en)":81.59,"STS17 (es-es)":82.77,"STS17 (fr-en)":77.16,"STS17 (it-en)":81.52,"STS17 (ko-ko)":77.0,"STS17 (nl-en)":80.7,"STS22 (ar)":52.61,"STS22 (de)":41.84,"STS22 (de-en)":49.09,"STS22 (de-fr)":50.6,"STS22 (de-pl)":50.44,"STS22 (es)":57.23,"STS22 (es-en)":67.29,"STS22 (es-it)":57.93,"STS22 (fr)":72.79,"STS22 (fr-pl)":73.25,"STS22 (it)":64.17,"STS22 (pl)":36.37,"STS22 (pl-en)":67.72,"STS22 (ru)":53.35,"STS22 (tr)":52.71,"STS22 (zh-en)":64.45,"STSBenchmark":81.34} -{"level_0":8,"index":224,"Rank":9,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":65.43,"STS17 (ar-ar)":77.34,"STS17 (en-ar)":77.46,"STS17 (en-de)":80.24,"STS17 (en-tr)":74.34,"STS17 (es-en)":77.4,"STS17 (es-es)":83.71,"STS17 (fr-en)":79.28,"STS17 (it-en)":80.82,"STS17 (ko-ko)":76.4,"STS17 (nl-en)":80.51,"STS22 (ar)":49.04,"STS22 (de)":35.73,"STS22 (de-en)":47.51,"STS22 (de-fr)":60.76,"STS22 (de-pl)":36.09,"STS22 (es)":59.34,"STS22 (es-en)":68.96,"STS22 (es-it)":63.28,"STS22 (fr)":76.41,"STS22 (fr-pl)":61.98,"STS22 (it)":65.1,"STS22 (pl)":34.58,"STS22 (pl-en)":71.33,"STS22 (ru)":52.4,"STS22 (tr)":54.07,"STS22 (zh-en)":61.75,"STSBenchmark":80.75} -{"level_0":9,"index":157,"Rank":10,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.1,"STS17 (ar-ar)":81.13,"STS17 (en-ar)":79.64,"STS17 (en-de)":52.52,"STS17 (en-tr)":4.75,"STS17 (es-en)":85.41,"STS17 (es-es)":87.33,"STS17 (fr-en)":83.96,"STS17 (it-en)":45.62,"STS17 
(ko-ko)":61.89,"STS17 (nl-en)":46.69,"STS22 (ar)":55.0,"STS22 (de)":37.51,"STS22 (de-en)":51.66,"STS22 (de-fr)":39.66,"STS22 (de-pl)":26.11,"STS22 (es)":59.79,"STS22 (es-en)":73.59,"STS22 (es-it)":67.83,"STS22 (fr)":77.1,"STS22 (fr-pl)":84.52,"STS22 (it)":68.87,"STS22 (pl)":27.98,"STS22 (pl-en)":60.77,"STS22 (ru)":43.14,"STS22 (tr)":42.33,"STS22 (zh-en)":65.01,"STSBenchmark":85.79} -{"level_0":10,"index":115,"Rank":11,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.34,"STS17 (ar-ar)":76.42,"STS17 (en-ar)":78.07,"STS17 (en-de)":59.1,"STS17 (en-tr)":11.8,"STS17 (es-en)":78.22,"STS17 (es-es)":86.0,"STS17 (fr-en)":80.46,"STS17 (it-en)":51.58,"STS17 (ko-ko)":66.89,"STS17 (nl-en)":45.85,"STS22 (ar)":58.67,"STS22 (de)":30.05,"STS22 (de-en)":51.16,"STS22 (de-fr)":53.28,"STS22 (de-pl)":43.05,"STS22 (es)":65.41,"STS22 (es-en)":75.06,"STS22 (es-it)":65.5,"STS22 (fr)":80.38,"STS22 (fr-pl)":28.17,"STS22 (it)":65.65,"STS22 (pl)":31.13,"STS22 (pl-en)":53.31,"STS22 (ru)":43.36,"STS22 (tr)":47.14,"STS22 (zh-en)":68.45,"STSBenchmark":80.9} -{"level_0":11,"index":132,"Rank":12,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.22,"STS17 (ar-ar)":74.97,"STS17 (en-ar)":74.05,"STS17 (en-de)":74.95,"STS17 (en-tr)":77.18,"STS17 (es-en)":72.25,"STS17 (es-es)":80.65,"STS17 (fr-en)":77.2,"STS17 (it-en)":78.67,"STS17 (ko-ko)":66.14,"STS17 (nl-en)":78.14,"STS22 (ar)":40.25,"STS22 (de)":24.09,"STS22 (de-en)":34.28,"STS22 (de-fr)":41.29,"STS22 (de-pl)":25.81,"STS22 (es)":55.4,"STS22 (es-en)":57.82,"STS22 (es-it)":49.13,"STS22 (fr)":61.72,"STS22 (fr-pl)":61.98,"STS22 (it)":62.2,"STS22 (pl)":25.31,"STS22 (pl-en)":44.72,"STS22 (ru)":43.57,"STS22 (tr)":46.46,"STS22 (zh-en)":49.19,"STSBenchmark":67.39} -{"level_0":12,"index":11,"Rank":13,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":52.31,"STS17 (ar-ar)":67.47,"STS17 (en-ar)":65.05,"STS17 (en-de)":66.66,"STS17 (en-tr)":70.05,"STS17 (es-en)":55.3,"STS17 (es-es)":79.67,"STS17 (fr-en)":70.82,"STS17 (it-en)":70.98,"STS17 (ko-ko)":70.52,"STS17 (nl-en)":68.12,"STS22 (ar)":42.57,"STS22 (de)":25.69,"STS22 (de-en)":32.35,"STS22 (de-fr)":37.41,"STS22 (de-pl)":15.67,"STS22 (es)":54.92,"STS22 (es-en)":54.34,"STS22 (es-it)":42.21,"STS22 (fr)":58.61,"STS22 (fr-pl)":39.44,"STS22 (it)":60.31,"STS22 (pl)":18.34,"STS22 (pl-en)":53.63,"STS22 (ru)":39.24,"STS22 (tr)":36.97,"STS22 (zh-en)":46.19,"STSBenchmark":69.77} -{"level_0":13,"index":159,"Rank":14,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.04,"STS17 (ar-ar)":80.6,"STS17 (en-ar)":72.6,"STS17 (en-de)":40.34,"STS17 (en-tr)":6.79,"STS17 (es-en)":81.8,"STS17 (es-es)":85.65,"STS17 (fr-en)":79.94,"STS17 (it-en)":34.8,"STS17 (ko-ko)":57.28,"STS17 (nl-en)":33.58,"STS22 (ar)":54.82,"STS22 (de)":26.63,"STS22 (de-en)":49.55,"STS22 (de-fr)":22.36,"STS22 (de-pl)":35.32,"STS22 (es)":56.31,"STS22 (es-en)":71.03,"STS22 (es-it)":61.3,"STS22 (fr)":61.35,"STS22 (fr-pl)":73.25,"STS22 (it)":62.61,"STS22 (pl)":15.06,"STS22 (pl-en)":43.72,"STS22 (ru)":28.77,"STS22 (tr)":22.11,"STS22 (zh-en)":63.9,"STSBenchmark":83.63} -{"level_0":14,"index":235,"Rank":15,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":50.06,"STS17 (ar-ar)":11.13,"STS17 (en-ar)":-3.93,"STS17 (en-de)":79.04,"STS17 (en-tr)":13.61,"STS17 (es-en)":71.72,"STS17 (es-es)":83.42,"STS17 (fr-en)":71.38,"STS17 
(it-en)":69.5,"STS17 (ko-ko)":9.61,"STS17 (nl-en)":66.12,"STS22 (ar)":29.6,"STS22 (de)":47.72,"STS22 (de-en)":49.64,"STS22 (de-fr)":62.21,"STS22 (de-pl)":34.34,"STS22 (es)":58.16,"STS22 (es-en)":69.15,"STS22 (es-it)":65.26,"STS22 (fr)":77.49,"STS22 (fr-pl)":50.71,"STS22 (it)":66.91,"STS22 (pl)":27.04,"STS22 (pl-en)":58.85,"STS22 (ru)":26.63,"STS22 (tr)":43.36,"STS22 (zh-en)":29.0,"STSBenchmark":83.93} -{"level_0":15,"index":227,"Rank":16,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":49.47,"STS17 (ar-ar)":9.06,"STS17 (en-ar)":-3.22,"STS17 (en-de)":70.38,"STS17 (en-tr)":17.17,"STS17 (es-en)":60.24,"STS17 (es-es)":81.93,"STS17 (fr-en)":62.17,"STS17 (it-en)":59.11,"STS17 (ko-ko)":8.9,"STS17 (nl-en)":56.91,"STS22 (ar)":37.66,"STS22 (de)":50.58,"STS22 (de-en)":53.63,"STS22 (de-fr)":55.72,"STS22 (de-pl)":27.99,"STS22 (es)":59.14,"STS22 (es-en)":69.99,"STS22 (es-it)":60.94,"STS22 (fr)":79.43,"STS22 (fr-pl)":61.98,"STS22 (it)":67.14,"STS22 (pl)":33.74,"STS22 (pl-en)":60.18,"STS22 (ru)":32.69,"STS22 (tr)":55.79,"STS22 (zh-en)":28.85,"STSBenchmark":77.65} -{"level_0":16,"index":49,"Rank":17,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.36,"STS17 (ar-ar)":46.8,"STS17 (en-ar)":-0.78,"STS17 (en-de)":47.5,"STS17 (en-tr)":4.18,"STS17 (es-en)":44.27,"STS17 (es-es)":79.22,"STS17 (fr-en)":47.15,"STS17 (it-en)":42.65,"STS17 (ko-ko)":39.79,"STS17 (nl-en)":36.6,"STS22 (ar)":25.06,"STS22 (de)":39.49,"STS22 (de-en)":54.22,"STS22 (de-fr)":48.91,"STS22 (de-pl)":33.04,"STS22 (es)":59.47,"STS22 (es-en)":66.65,"STS22 (es-it)":64.37,"STS22 (fr)":79.88,"STS22 (fr-pl)":39.44,"STS22 (it)":68.15,"STS22 (pl)":35.38,"STS22 (pl-en)":62.7,"STS22 (ru)":30.62,"STS22 (tr)":45.65,"STS22 (zh-en)":49.25,"STSBenchmark":88.96} -{"level_0":17,"index":226,"Rank":18,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":46.79,"STS17 (ar-ar)":10.19,"STS17 (en-ar)":-5.77,"STS17 (en-de)":67.43,"STS17 (en-tr)":8.75,"STS17 (es-en)":54.96,"STS17 (es-es)":82.74,"STS17 (fr-en)":60.5,"STS17 (it-en)":46.26,"STS17 (ko-ko)":8.96,"STS17 (nl-en)":47.48,"STS22 (ar)":34.97,"STS22 (de)":51.7,"STS22 (de-en)":48.76,"STS22 (de-fr)":57.5,"STS22 (de-pl)":32.76,"STS22 (es)":57.49,"STS22 (es-en)":67.76,"STS22 (es-it)":57.18,"STS22 (fr)":78.7,"STS22 (fr-pl)":61.98,"STS22 (it)":67.67,"STS22 (pl)":30.68,"STS22 (pl-en)":54.17,"STS22 (ru)":15.36,"STS22 (tr)":58.12,"STS22 (zh-en)":29.42,"STSBenchmark":77.6} -{"level_0":18,"index":234,"Rank":19,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":44.35,"STS17 (ar-ar)":10.75,"STS17 (en-ar)":-4.71,"STS17 (en-de)":73.62,"STS17 (en-tr)":-0.42,"STS17 (es-en)":62.62,"STS17 (es-es)":82.74,"STS17 (fr-en)":67.86,"STS17 (it-en)":51.86,"STS17 (ko-ko)":9.44,"STS17 (nl-en)":45.95,"STS22 (ar)":27.01,"STS22 (de)":43.73,"STS22 (de-en)":49.93,"STS22 (de-fr)":61.58,"STS22 (de-pl)":38.83,"STS22 (es)":57.68,"STS22 (es-en)":68.09,"STS22 (es-it)":61.58,"STS22 (fr)":75.01,"STS22 (fr-pl)":5.63,"STS22 (it)":62.01,"STS22 (pl)":25.0,"STS22 (pl-en)":51.72,"STS22 (ru)":14.21,"STS22 (tr)":47.3,"STS22 (zh-en)":23.1,"STSBenchmark":85.36} -{"level_0":19,"index":233,"Rank":20,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.17,"STS17 (ar-ar)":13.36,"STS17 (en-ar)":-5.65,"STS17 (en-de)":67.11,"STS17 (en-tr)":-0.02,"STS17 (es-en)":47.72,"STS17 (es-es)":79.94,"STS17 
(fr-en)":56.61,"STS17 (it-en)":30.46,"STS17 (ko-ko)":10.06,"STS17 (nl-en)":36.46,"STS22 (ar)":31.2,"STS22 (de)":42.08,"STS22 (de-en)":46.9,"STS22 (de-fr)":55.04,"STS22 (de-pl)":33.94,"STS22 (es)":53.81,"STS22 (es-en)":65.19,"STS22 (es-it)":55.29,"STS22 (fr)":77.69,"STS22 (fr-pl)":28.17,"STS22 (it)":60.65,"STS22 (pl)":24.42,"STS22 (pl-en)":42.97,"STS22 (ru)":12.13,"STS22 (tr)":40.45,"STS22 (zh-en)":20.15,"STSBenchmark":85.52} -{"level_0":20,"index":219,"Rank":21,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":37.71,"STS17 (ar-ar)":50.89,"STS17 (en-ar)":-4.28,"STS17 (en-de)":35.82,"STS17 (en-tr)":4.5,"STS17 (es-en)":16.31,"STS17 (es-es)":76.12,"STS17 (fr-en)":37.09,"STS17 (it-en)":24.45,"STS17 (ko-ko)":43.39,"STS17 (nl-en)":29.0,"STS22 (ar)":22.64,"STS22 (de)":31.04,"STS22 (de-en)":44.04,"STS22 (de-fr)":30.07,"STS22 (de-pl)":4.93,"STS22 (es)":54.78,"STS22 (es-en)":53.42,"STS22 (es-it)":44.27,"STS22 (fr)":77.0,"STS22 (fr-pl)":50.71,"STS22 (it)":60.4,"STS22 (pl)":26.77,"STS22 (pl-en)":32.8,"STS22 (ru)":14.72,"STS22 (tr)":33.69,"STS22 (zh-en)":41.64,"STSBenchmark":82.03} -{"level_0":21,"index":72,"Rank":22,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":37.32,"STS17 (ar-ar)":55.62,"STS17 (en-ar)":8.21,"STS17 (en-de)":30.18,"STS17 (en-tr)":1.04,"STS17 (es-en)":28.78,"STS17 (es-es)":71.88,"STS17 (fr-en)":26.34,"STS17 (it-en)":20.73,"STS17 (ko-ko)":52.39,"STS17 (nl-en)":25.05,"STS22 (ar)":28.19,"STS22 (de)":21.99,"STS22 (de-en)":53.07,"STS22 (de-fr)":32.97,"STS22 (de-pl)":20.45,"STS22 (es)":49.81,"STS22 (es-en)":49.51,"STS22 (es-it)":45.78,"STS22 (fr)":67.66,"STS22 (fr-pl)":61.98,"STS22 (it)":48.25,"STS22 (pl)":23.31,"STS22 (pl-en)":36.8,"STS22 (ru)":9.07,"STS22 (tr)":34.66,"STS22 (zh-en)":28.68,"STSBenchmark":75.34} -{"level_0":22,"index":218,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":34.57,"STS17 (ar-ar)":58.71,"STS17 (en-ar)":0.54,"STS17 (en-de)":27.54,"STS17 (en-tr)":0.43,"STS17 (es-en)":22.01,"STS17 (es-es)":78.37,"STS17 (fr-en)":30.7,"STS17 (it-en)":24.28,"STS17 (ko-ko)":43.37,"STS17 (nl-en)":24.51,"STS22 (ar)":17.54,"STS22 (de)":22.53,"STS22 (de-en)":42.86,"STS22 (de-fr)":43.52,"STS22 (de-pl)":1.63,"STS22 (es)":43.98,"STS22 (es-en)":53.99,"STS22 (es-it)":40.71,"STS22 (fr)":69.51,"STS22 (fr-pl)":16.9,"STS22 (it)":47.48,"STS22 (pl)":19.22,"STS22 (pl-en)":42.67,"STS22 (ru)":11.19,"STS22 (tr)":21.6,"STS22 (zh-en)":44.39,"STSBenchmark":83.09} -{"level_0":23,"index":73,"Rank":24,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":27.72,"STS17 (ar-ar)":54.16,"STS17 (en-ar)":1.72,"STS17 (en-de)":25.48,"STS17 (en-tr)":2.09,"STS17 (es-en)":21.93,"STS17 (es-es)":67.8,"STS17 (fr-en)":18.91,"STS17 (it-en)":16.39,"STS17 (ko-ko)":45.66,"STS17 (nl-en)":23.49,"STS22 (ar)":5.17,"STS22 (de)":11.0,"STS22 (de-en)":53.93,"STS22 (de-fr)":25.11,"STS22 (de-pl)":20.94,"STS22 (es)":43.05,"STS22 (es-en)":32.74,"STS22 (es-it)":35.99,"STS22 (fr)":54.56,"STS22 (fr-pl)":5.63,"STS22 (it)":33.68,"STS22 (pl)":14.91,"STS22 (pl-en)":20.54,"STS22 (ru)":3.36,"STS22 (tr)":3.82,"STS22 (zh-en)":26.71,"STSBenchmark":79.54} -{"level_0":24,"index":221,"Rank":25,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":25.1,"STS17 (ar-ar)":27.14,"STS17 (en-ar)":6.9,"STS17 
(en-de)":11.59,"STS17 (en-tr)":6.46,"STS17 (es-en)":10.86,"STS17 (es-es)":55.45,"STS17 (fr-en)":16.02,"STS17 (it-en)":19.87,"STS17 (ko-ko)":8.08,"STS17 (nl-en)":24.92,"STS22 (ar)":19.57,"STS22 (de)":17.31,"STS22 (de-en)":26.03,"STS22 (de-fr)":10.26,"STS22 (de-pl)":16.94,"STS22 (es)":48.89,"STS22 (es-en)":51.79,"STS22 (es-it)":25.24,"STS22 (fr)":53.92,"STS22 (fr-pl)":39.44,"STS22 (it)":39.43,"STS22 (pl)":13.56,"STS22 (pl-en)":25.36,"STS22 (ru)":1.11,"STS22 (tr)":31.73,"STS22 (zh-en)":8.44,"STSBenchmark":61.26} -{"level_0":25,"index":223,"Rank":26,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":24.28,"STS17 (ar-ar)":13.78,"STS17 (en-ar)":9.08,"STS17 (en-de)":-3.11,"STS17 (en-tr)":-0.45,"STS17 (es-en)":-8.18,"STS17 (es-es)":48.23,"STS17 (fr-en)":5.81,"STS17 (it-en)":3.64,"STS17 (ko-ko)":2.54,"STS17 (nl-en)":0.44,"STS22 (ar)":32.42,"STS22 (de)":33.04,"STS22 (de-en)":28.65,"STS22 (de-fr)":14.77,"STS22 (de-pl)":11.21,"STS22 (es)":48.53,"STS22 (es-en)":26.97,"STS22 (es-it)":41.1,"STS22 (fr)":49.43,"STS22 (fr-pl)":39.44,"STS22 (it)":57.77,"STS22 (pl)":12.47,"STS22 (pl-en)":45.55,"STS22 (ru)":19.44,"STS22 (tr)":47.38,"STS22 (zh-en)":14.05,"STSBenchmark":61.55} -{"level_0":26,"index":0,"Rank":27,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.33} -{"level_0":27,"index":1,"Rank":28,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.99} -{"level_0":28,"index":2,"Rank":29,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.85} -{"level_0":29,"index":3,"Rank":30,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 
(ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.74,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":30,"index":4,"Rank":31,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.51,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":31,"index":5,"Rank":32,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":79.99,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":32,"index":6,"Rank":33,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.22} -{"level_0":33,"index":7,"Rank":34,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.75,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":34,"index":8,"Rank":35,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.86} 
-{"level_0":35,"index":9,"Rank":36,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.56} -{"level_0":36,"index":10,"Rank":37,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":37,"index":12,"Rank":38,"Model":"Arabic_text_embedding_for_sts<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":85.05,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":38,"index":13,"Rank":39,"Model":"arabic_text_embedding_sts_arabertv02_arabicnlitriplet<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":84.96,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":39,"index":14,"Rank":40,"Model":"llm2vec-croissant-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":40,"index":15,"Rank":41,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":67.83,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":41,"index":16,"Rank":42,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.35} -{"level_0":42,"index":17,"Rank":43,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38} -{"level_0":43,"index":18,"Rank":44,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85} -{"level_0":44,"index":19,"Rank":45,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.04} -{"level_0":45,"index":20,"Rank":46,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":83.07} -{"level_0":46,"index":21,"Rank":47,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"level_0":47,"index":22,"Rank":48,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.42} -{"level_0":48,"index":23,"Rank":49,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52} -{"level_0":49,"index":24,"Rank":50,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.28,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":42.79,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.25} -{"level_0":50,"index":25,"Rank":51,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.86} -{"level_0":51,"index":26,"Rank":52,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 
(ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"level_0":52,"index":27,"Rank":53,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"level_0":53,"index":28,"Rank":54,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"level_0":54,"index":29,"Rank":55,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"level_0":55,"index":30,"Rank":56,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.58} -{"level_0":56,"index":31,"Rank":57,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.55} 
-{"level_0":57,"index":32,"Rank":58,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.46} -{"level_0":58,"index":33,"Rank":59,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.79} -{"level_0":59,"index":34,"Rank":60,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":33.88,"STSBenchmark":""} -{"level_0":60,"index":35,"Rank":61,"Model":"STS-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.98} -{"level_0":61,"index":36,"Rank":62,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":62,"index":37,"Rank":63,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 
(fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.4,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":63,"index":38,"Rank":64,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":38.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":64,"index":39,"Rank":65,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.1} -{"level_0":65,"index":40,"Rank":66,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.43} -{"level_0":66,"index":41,"Rank":67,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.57} -{"level_0":67,"index":42,"Rank":68,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":""} -{"level_0":68,"index":43,"Rank":69,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":69,"index":44,"Rank":70,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":70,"index":45,"Rank":71,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":71,"index":46,"Rank":72,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":72,"index":47,"Rank":73,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":73,"index":48,"Rank":74,"Model":"neural-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 
(nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.91} -{"level_0":74,"index":50,"Rank":75,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.14,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":75,"index":51,"Rank":76,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":84.64,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":76,"index":52,"Rank":77,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":77,"index":53,"Rank":78,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.57,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":78,"index":54,"Rank":79,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.81} 
-{"level_0":79,"index":56,"Rank":80,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.72} -{"level_0":80,"index":57,"Rank":81,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.32} -{"level_0":81,"index":58,"Rank":82,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.05} -{"level_0":82,"index":59,"Rank":83,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.44} -{"level_0":83,"index":60,"Rank":84,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.65} -{"level_0":84,"index":61,"Rank":85,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 
(es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.42} -{"level_0":85,"index":62,"Rank":86,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.16} -{"level_0":86,"index":63,"Rank":87,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":73.36} -{"level_0":87,"index":64,"Rank":88,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.34} -{"level_0":88,"index":65,"Rank":89,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25} -{"level_0":89,"index":66,"Rank":90,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 
(tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21} -{"level_0":90,"index":67,"Rank":91,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.72} -{"level_0":91,"index":68,"Rank":92,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.79} -{"level_0":92,"index":69,"Rank":93,"Model":"test24<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.93} -{"level_0":93,"index":70,"Rank":94,"Model":"test25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.61} -{"level_0":94,"index":71,"Rank":95,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.59} -{"level_0":95,"index":74,"Rank":96,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21} -{"level_0":96,"index":75,"Rank":97,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.39} -{"level_0":97,"index":76,"Rank":98,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67} -{"level_0":98,"index":77,"Rank":99,"Model":"Arabert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":83.16,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.29,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":99,"index":78,"Rank":100,"Model":"Arabic-MiniLM-L12-v2-all-nli-triplet<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":81.11,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.41,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":100,"index":79,"Rank":101,"Model":"Arabic-Triplet-Matryoshka-V2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":85.31,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 
(pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":101,"index":80,"Rank":102,"Model":"Arabic-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":82.4,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":51.38,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":102,"index":81,"Rank":103,"Model":"Arabic-labse-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":82.47,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":57.26,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":103,"index":82,"Rank":104,"Model":"Arabic-mpnet-base-all-nli-triplet<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":79.93,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.44,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":104,"index":83,"Rank":105,"Model":"Marbert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":82.18,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.08,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":105,"index":84,"Rank":106,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":106,"index":85,"Rank":107,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":36.78,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":107,"index":86,"Rank":108,"Model":"e5-large-v2-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74} -{"level_0":108,"index":87,"Rank":109,"Model":"bge_m3e_stella<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.83} -{"level_0":109,"index":88,"Rank":110,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.6} -{"level_0":110,"index":89,"Rank":111,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.0} -{"level_0":111,"index":90,"Rank":112,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 
(es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":52.67} -{"level_0":112,"index":91,"Rank":113,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.68} -{"level_0":113,"index":92,"Rank":114,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.1} -{"level_0":114,"index":93,"Rank":115,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.9} -{"level_0":115,"index":94,"Rank":116,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.19} -{"level_0":116,"index":95,"Rank":117,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.18} -{"level_0":117,"index":96,"Rank":118,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.32} -{"level_0":118,"index":97,"Rank":119,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.5} -{"level_0":119,"index":98,"Rank":120,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.38} -{"level_0":120,"index":99,"Rank":121,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06} -{"level_0":121,"index":100,"Rank":122,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":122,"index":101,"Rank":123,"Model":"German_Semantic_STS_V2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 
(es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":123,"index":102,"Rank":124,"Model":"UAE-Large-V1-Q5_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06} -{"level_0":124,"index":104,"Rank":125,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.94} -{"level_0":125,"index":105,"Rank":126,"Model":"bge-large-en-v1.5-Q4_K_S-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52} -{"level_0":126,"index":106,"Rank":127,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.32} -{"level_0":127,"index":107,"Rank":128,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.4} -{"level_0":128,"index":108,"Rank":129,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.3} -{"level_0":129,"index":109,"Rank":130,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.08} -{"level_0":130,"index":110,"Rank":131,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.98} -{"level_0":131,"index":111,"Rank":132,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"level_0":132,"index":112,"Rank":133,"Model":"snowflake-arctic-embed-l-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.68} -{"level_0":133,"index":113,"Rank":134,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 
(de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":47.29} -{"level_0":134,"index":114,"Rank":135,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":73.13,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":135,"index":116,"Rank":136,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.85} -{"level_0":136,"index":117,"Rank":137,"Model":"NoInstruct-small-Embedding-v0-Q4_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.98} -{"level_0":137,"index":118,"Rank":138,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85} -{"level_0":138,"index":119,"Rank":139,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":71.4} 
-{"level_0":139,"index":120,"Rank":140,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"level_0":140,"index":121,"Rank":141,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.54,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":141,"index":122,"Rank":142,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.73,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":142,"index":123,"Rank":143,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.2} -{"level_0":143,"index":124,"Rank":144,"Model":"mxbai-embed-large-v1-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.1} -{"level_0":144,"index":125,"Rank":145,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.98} -{"level_0":145,"index":126,"Rank":146,"Model":"jina-embeddings-v2-base-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84} -{"level_0":146,"index":127,"Rank":147,"Model":"jina-embeddings-v2-small-en-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.04} -{"level_0":147,"index":128,"Rank":148,"Model":"stella-base-en-v2-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.45} -{"level_0":148,"index":129,"Rank":149,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.23} -{"level_0":149,"index":130,"Rank":150,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 
(pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74} -{"level_0":150,"index":131,"Rank":151,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52} -{"level_0":151,"index":133,"Rank":152,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":65.37,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":152,"index":134,"Rank":153,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.15,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":153,"index":135,"Rank":154,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":48.52,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":154,"index":136,"Rank":155,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":39.05,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":155,"index":137,"Rank":156,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 
(en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.47,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":156,"index":138,"Rank":157,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.43} -{"level_0":157,"index":139,"Rank":158,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88} -{"level_0":158,"index":140,"Rank":159,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.56} -{"level_0":159,"index":141,"Rank":160,"Model":"bge-large-en-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52} -{"level_0":160,"index":142,"Rank":161,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 
(fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.45} -{"level_0":161,"index":143,"Rank":162,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.18} -{"level_0":162,"index":144,"Rank":163,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52} -{"level_0":163,"index":145,"Rank":164,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.21} -{"level_0":164,"index":146,"Rank":165,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74} -{"level_0":165,"index":148,"Rank":166,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.36} -{"level_0":166,"index":149,"Rank":167,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 
(es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.95} -{"level_0":167,"index":152,"Rank":168,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.38} -{"level_0":168,"index":154,"Rank":169,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":169,"index":155,"Rank":170,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":38.69,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":170,"index":156,"Rank":171,"Model":"UAE-Large-V1-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06} -{"level_0":171,"index":158,"Rank":172,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 
(it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.71} -{"level_0":172,"index":160,"Rank":173,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.02} -{"level_0":173,"index":161,"Rank":174,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.33} -{"level_0":174,"index":162,"Rank":175,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.57} -{"level_0":175,"index":163,"Rank":176,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.6} -{"level_0":176,"index":164,"Rank":177,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.2} -{"level_0":177,"index":165,"Rank":178,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 
(en-de)":86.72,"STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":59.07,"STS22 (de-en)":55.97,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.14} -{"level_0":178,"index":166,"Rank":179,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84} -{"level_0":179,"index":167,"Rank":180,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":86.49,"STS17 (es-es)":88.25,"STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":67.97,"STS22 (es-en)":78.8,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.84} -{"level_0":180,"index":168,"Rank":181,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.04} -{"level_0":181,"index":169,"Rank":182,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67} -{"level_0":182,"index":170,"Rank":183,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 
(es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"level_0":183,"index":171,"Rank":184,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"level_0":184,"index":172,"Rank":185,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"level_0":185,"index":173,"Rank":186,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42} -{"level_0":186,"index":174,"Rank":187,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.69} -{"level_0":187,"index":175,"Rank":188,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.5} 
-{"level_0":188,"index":176,"Rank":189,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.46} -{"level_0":189,"index":177,"Rank":190,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.77} -{"level_0":190,"index":178,"Rank":191,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.7,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":191,"index":179,"Rank":192,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":75.66,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":192,"index":180,"Rank":193,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.68,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":193,"index":181,"Rank":194,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 
(ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":194,"index":182,"Rank":195,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.35,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":195,"index":183,"Rank":196,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.2} -{"level_0":196,"index":184,"Rank":197,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.1} -{"level_0":197,"index":185,"Rank":198,"Model":"b1ade-embed-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.77} -{"level_0":198,"index":186,"Rank":199,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":86.79} -{"level_0":199,"index":187,"Rank":200,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.93} -{"level_0":200,"index":188,"Rank":201,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.89} -{"level_0":201,"index":189,"Rank":202,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88} -{"level_0":202,"index":190,"Rank":203,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.31} -{"level_0":203,"index":191,"Rank":204,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.77} -{"level_0":204,"index":192,"Rank":205,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 
(es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38} -{"level_0":205,"index":193,"Rank":206,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85} -{"level_0":206,"index":194,"Rank":207,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":69.44,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":207,"index":195,"Rank":208,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":37.93,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":208,"index":196,"Rank":209,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":65.71,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":209,"index":197,"Rank":210,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 
(it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54} -{"level_0":210,"index":198,"Rank":211,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42} -{"level_0":211,"index":199,"Rank":212,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.96} -{"level_0":212,"index":200,"Rank":213,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.28} -{"level_0":213,"index":201,"Rank":214,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.8} -{"level_0":214,"index":202,"Rank":215,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.14} -{"level_0":215,"index":203,"Rank":216,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, 
fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.46} -{"level_0":216,"index":204,"Rank":217,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47} -{"level_0":217,"index":205,"Rank":218,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.81} -{"level_0":218,"index":206,"Rank":219,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.14} -{"level_0":219,"index":207,"Rank":220,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25} -{"level_0":220,"index":208,"Rank":221,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 
(de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.52} -{"level_0":221,"index":209,"Rank":222,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.15} -{"level_0":222,"index":210,"Rank":223,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":223,"index":211,"Rank":224,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.4,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":224,"index":212,"Rank":225,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.63,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":225,"index":213,"Rank":226,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":226,"index":214,"Rank":227,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.32,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":227,"index":215,"Rank":228,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":228,"index":216,"Rank":229,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":37.34,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":229,"index":220,"Rank":230,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42} -{"level_0":230,"index":222,"Rank":231,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.54} -{"level_0":231,"index":225,"Rank":232,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 
(de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.58} -{"level_0":232,"index":228,"Rank":233,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.73} -{"level_0":233,"index":229,"Rank":234,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.97} -{"level_0":234,"index":230,"Rank":235,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.62,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":235,"index":231,"Rank":236,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":79.16,"STS17 (en-ar)":81.22,"STS17 (en-de)":84.22,"STS17 (en-tr)":76.74,"STS17 (es-en)":84.44,"STS17 (es-es)":85.56,"STS17 (fr-en)":76.59,"STS17 (it-en)":82.35,"STS17 (ko-ko)":77.03,"STS17 (nl-en)":81.71,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.55,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.42} -{"level_0":236,"index":232,"Rank":237,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":79.1,"STS17 (en-ar)":80.85,"STS17 (en-de)":83.28,"STS17 (en-tr)":74.9,"STS17 (es-en)":86.11,"STS17 (es-es)":85.14,"STS17 (fr-en)":81.17,"STS17 (it-en)":84.24,"STS17 (ko-ko)":83.41,"STS17 (nl-en)":82.51,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.3,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.64,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 
(zh-en)":"","STSBenchmark":86.82} -{"level_0":237,"index":236,"Rank":238,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":76.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.01} -{"level_0":238,"index":237,"Rank":239,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":27.95,"STS22 (de)":8.16,"STS22 (de-en)":21.55,"STS22 (de-fr)":17.5,"STS22 (de-pl)":25.53,"STS22 (es)":45.31,"STS22 (es-en)":42.77,"STS22 (es-it)":32.83,"STS22 (fr)":42.0,"STS22 (fr-pl)":39.44,"STS22 (it)":39.69,"STS22 (pl)":9.71,"STS22 (pl-en)":42.08,"STS22 (ru)":60.06,"STS22 (tr)":15.46,"STS22 (zh-en)":31.25,"STSBenchmark":""} -{"level_0":239,"index":239,"Rank":240,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":240,"index":240,"Rank":241,"Model":"gte-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.73} -{"level_0":241,"index":241,"Rank":242,"Model":"gte-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.07} -{"level_0":242,"index":242,"Rank":243,"Model":"gte-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 
(fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.57} -{"level_0":243,"index":243,"Rank":244,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":44.39} -{"level_0":244,"index":244,"Rank":245,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":68.04} -{"level_0":245,"index":245,"Rank":246,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.08} -{"level_0":246,"index":246,"Rank":247,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.93} -{"level_0":247,"index":247,"Rank":248,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.91,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 
(ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.28} -{"level_0":248,"index":248,"Rank":249,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":71.11,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.24} -{"level_0":249,"index":249,"Rank":250,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.77} -{"level_0":250,"index":250,"Rank":251,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.69} -{"level_0":251,"index":251,"Rank":252,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.8} -{"level_0":252,"index":252,"Rank":253,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.72,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":253,"index":253,"Rank":254,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 
(es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.49,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":254,"index":254,"Rank":255,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""} -{"level_0":255,"index":255,"Rank":256,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.92} -{"level_0":256,"index":256,"Rank":257,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.4} -{"level_0":257,"index":257,"Rank":258,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.45} -{"level_0":258,"index":258,"Rank":259,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 
(ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.13} -{"level_0":259,"index":259,"Rank":260,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":70.01,"STSBenchmark":85.99} -{"level_0":260,"index":260,"Rank":261,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.78} -{"level_0":261,"index":261,"Rank":262,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.32} -{"level_0":262,"index":262,"Rank":263,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.02} -{"level_0":263,"index":263,"Rank":264,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.08} -{"level_0":264,"index":264,"Rank":265,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 
(es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.09,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":64.5,"STS22 (zh-en)":"","STSBenchmark":83.17} -{"level_0":265,"index":265,"Rank":266,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.34} -{"level_0":266,"index":266,"Rank":267,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.56} -{"level_0":267,"index":267,"Rank":268,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.24} -{"level_0":268,"index":268,"Rank":269,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.54} +{"Rank":1,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":67.9,"STS17 (ar-ar)":69.07,"STS17 (en-ar)":74.51,"STS17 (en-de)":73.85,"STS17 (en-tr)":72.07,"STS17 (es-en)":65.71,"STS17 (es-es)":80.83,"STS17 (fr-en)":76.98,"STS17 (it-en)":76.99,"STS17 (ko-ko)":71.32,"STS17 (nl-en)":75.22,"STS22 (ar)":57.67,"STS22 (de)":48.58,"STS22 (de-en)":50.14,"STS22 (de-fr)":53.28,"STS22 (de-pl)":58.69,"STS22 (es)":63.18,"STS22 (es-en)":71.86,"STS22 (es-it)":69.69,"STS22 (fr)":77.95,"STS22 (fr-pl)":61.98,"STS22 
(it)":72.22,"STS22 (pl)":39.28,"STS22 (pl-en)":69.41,"STS22 (ru)":57.49,"STS22 (tr)":58.15,"STS22 (zh-en)":64.02,"STSBenchmark":72.25,"STSBenchmarkMultilingualSTS (cmn-Hans)":69.5,"STSBenchmarkMultilingualSTS (deu-Latn)":72.43,"STSBenchmarkMultilingualSTS (en)":72.25,"STSBenchmarkMultilingualSTS (fr)":75.1,"STSBenchmarkMultilingualSTS (fra-Latn)":75.1,"STSBenchmarkMultilingualSTS (ita-Latn)":72.97,"STSBenchmarkMultilingualSTS (nld-Latn)":70.22,"STSBenchmarkMultilingualSTS (pol-Latn)":72.58,"STSBenchmarkMultilingualSTS (por-Latn)":71.65,"STSBenchmarkMultilingualSTS (rus-Cyrl)":73.06,"STSBenchmarkMultilingualSTS (spa-Latn)":72.92} +{"Rank":2,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":44.27,"STS17 (ar-ar)":50.89,"STS17 (en-ar)":-4.28,"STS17 (en-de)":35.82,"STS17 (en-tr)":4.5,"STS17 (es-en)":16.31,"STS17 (es-es)":76.12,"STS17 (fr-en)":37.09,"STS17 (it-en)":24.45,"STS17 (ko-ko)":43.39,"STS17 (nl-en)":29.0,"STS22 (ar)":22.64,"STS22 (de)":31.04,"STS22 (de-en)":44.04,"STS22 (de-fr)":30.07,"STS22 (de-pl)":4.93,"STS22 (es)":54.78,"STS22 (es-en)":53.42,"STS22 (es-it)":44.27,"STS22 (fr)":77.0,"STS22 (fr-pl)":50.71,"STS22 (it)":60.4,"STS22 (pl)":26.77,"STS22 (pl-en)":32.8,"STS22 (ru)":14.72,"STS22 (tr)":33.69,"STS22 (zh-en)":41.64,"STSBenchmark":82.03,"STSBenchmarkMultilingualSTS (cmn-Hans)":39.74,"STSBenchmarkMultilingualSTS (deu-Latn)":62.4,"STSBenchmarkMultilingualSTS (en)":82.03,"STSBenchmarkMultilingualSTS (fr)":64.93,"STSBenchmarkMultilingualSTS (fra-Latn)":64.93,"STSBenchmarkMultilingualSTS (ita-Latn)":59.24,"STSBenchmarkMultilingualSTS (nld-Latn)":55.46,"STSBenchmarkMultilingualSTS (pol-Latn)":56.42,"STSBenchmarkMultilingualSTS (por-Latn)":61.56,"STSBenchmarkMultilingualSTS (rus-Cyrl)":55.55,"STSBenchmarkMultilingualSTS (spa-Latn)":61.62} +{"Rank":3,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.33,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 
(tr)":"","STS22 (zh-en)":"","STSBenchmark":88.99,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":5,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.85,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":6,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.74,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.72,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":7,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.51,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":76.43,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS 
(ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":8,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":79.99,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.02,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":9,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.22,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":10,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.75,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":83.02,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":11,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.86,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":12,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.56,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":13,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":82.72,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":14,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","STS17 (ar-ar)":67.47,"STS17 (en-ar)":65.05,"STS17 (en-de)":66.66,"STS17 (en-tr)":70.05,"STS17 (es-en)":55.3,"STS17 (es-es)":79.67,"STS17 
(fr-en)":70.82,"STS17 (it-en)":70.98,"STS17 (ko-ko)":70.52,"STS17 (nl-en)":68.12,"STS22 (ar)":42.57,"STS22 (de)":25.69,"STS22 (de-en)":32.35,"STS22 (de-fr)":37.41,"STS22 (de-pl)":15.67,"STS22 (es)":54.92,"STS22 (es-en)":54.34,"STS22 (es-it)":42.21,"STS22 (fr)":58.61,"STS22 (fr-pl)":39.44,"STS22 (it)":60.31,"STS22 (pl)":18.34,"STS22 (pl-en)":53.63,"STS22 (ru)":39.24,"STS22 (tr)":36.97,"STS22 (zh-en)":46.19,"STSBenchmark":69.77,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":69.82,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":15,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.35,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":16,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":79.27,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":17,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 
(es-it)":"","STS22 (fr)":82.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":76.48,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":18,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":81.84,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":61.6,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":20,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS 
(deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":53.76,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":21,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":66.71,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":22,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":23,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.4,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS 
(pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":24,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":38.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.1,"STSBenchmarkMultilingualSTS (cmn-Hans)":75.27,"STSBenchmarkMultilingualSTS (deu-Latn)":77.57,"STSBenchmarkMultilingualSTS (en)":83.12,"STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":77.48,"STSBenchmarkMultilingualSTS (ita-Latn)":76.24,"STSBenchmarkMultilingualSTS (nld-Latn)":74.83,"STSBenchmarkMultilingualSTS (pol-Latn)":74.67,"STSBenchmarkMultilingualSTS (por-Latn)":76.61,"STSBenchmarkMultilingualSTS (rus-Cyrl)":76.19,"STSBenchmarkMultilingualSTS (spa-Latn)":79.51} +{"Rank":26,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.72,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":27,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million 
Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.32,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":28,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.05,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":29,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.44,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":30,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 
(ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.65,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":31,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.42,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":32,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.16,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":33,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 
(pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":73.36,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":34,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.23,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":35,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":36,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS 
(fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":37,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":47.29,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":38,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":73.02,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":39,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS 
(rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":40,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":41,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.54,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":81.64,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":42,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.73,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":85.79,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":43,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 
(es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":81.81,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":44,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":82.96,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":45,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":58.45,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":46,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":65.37,"STS22 
(fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":37.14,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":47,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.15,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":33.41,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":48,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":48.52,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":15.66,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":49,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":39.05,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS 
(en)":"","STSBenchmarkMultilingualSTS (fr)":52.25,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":50,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.47,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":54.97,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":51,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":69.82,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":61.87,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":84.25,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":52,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":34.07,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.64,"STSBenchmarkMultilingualSTS (cmn-Hans)":79.87,"STSBenchmarkMultilingualSTS (deu-Latn)":79.68,"STSBenchmarkMultilingualSTS (en)":85.64,"STSBenchmarkMultilingualSTS (fr)":80.62,"STSBenchmarkMultilingualSTS (fra-Latn)":80.85,"STSBenchmarkMultilingualSTS (ita-Latn)":78.09,"STSBenchmarkMultilingualSTS (nld-Latn)":75.96,"STSBenchmarkMultilingualSTS 
(pol-Latn)":74.93,"STSBenchmarkMultilingualSTS (por-Latn)":67.16,"STSBenchmarkMultilingualSTS (rus-Cyrl)":79.33,"STSBenchmarkMultilingualSTS (spa-Latn)":81.75} +{"Rank":53,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":34.66,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.29,"STSBenchmarkMultilingualSTS (cmn-Hans)":81.22,"STSBenchmarkMultilingualSTS (deu-Latn)":84.27,"STSBenchmarkMultilingualSTS (en)":87.29,"STSBenchmarkMultilingualSTS (fr)":82.53,"STSBenchmarkMultilingualSTS (fra-Latn)":83.28,"STSBenchmarkMultilingualSTS (ita-Latn)":81.75,"STSBenchmarkMultilingualSTS (nld-Latn)":81.63,"STSBenchmarkMultilingualSTS (pol-Latn)":81.06,"STSBenchmarkMultilingualSTS (por-Latn)":73.31,"STSBenchmarkMultilingualSTS (rus-Cyrl)":83.05,"STSBenchmarkMultilingualSTS (spa-Latn)":83.81} +{"Rank":54,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":35.8,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.11,"STSBenchmarkMultilingualSTS (cmn-Hans)":78.49,"STSBenchmarkMultilingualSTS (deu-Latn)":79.17,"STSBenchmarkMultilingualSTS (en)":84.11,"STSBenchmarkMultilingualSTS (fr)":79.32,"STSBenchmarkMultilingualSTS (fra-Latn)":79.2,"STSBenchmarkMultilingualSTS (ita-Latn)":78.21,"STSBenchmarkMultilingualSTS (nld-Latn)":76.04,"STSBenchmarkMultilingualSTS (pol-Latn)":72.61,"STSBenchmarkMultilingualSTS (por-Latn)":77.39,"STSBenchmarkMultilingualSTS (rus-Cyrl)":78.24,"STSBenchmarkMultilingualSTS (spa-Latn)":80.31} +{"Rank":55,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":56,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":38.69,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":57,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":49.97,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":58,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":61.35,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":36.78,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":59,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 
(it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.28,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":60,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.8,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":61,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.14,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":62,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 
(pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.46,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":63,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.81,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":64,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":65,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.52,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS 
(en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":66,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":67,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":37.34,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":68,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":58.71,"STS17 (en-ar)":0.54,"STS17 (en-de)":27.54,"STS17 (en-tr)":0.43,"STS17 (es-en)":22.01,"STS17 (es-es)":78.37,"STS17 (fr-en)":30.7,"STS17 (it-en)":24.28,"STS17 (ko-ko)":43.37,"STS17 (nl-en)":24.51,"STS22 (ar)":17.54,"STS22 (de)":22.53,"STS22 (de-en)":42.86,"STS22 (de-fr)":43.52,"STS22 (de-pl)":1.63,"STS22 (es)":43.98,"STS22 (es-en)":53.99,"STS22 (es-it)":40.71,"STS22 (fr)":69.51,"STS22 (fr-pl)":16.9,"STS22 (it)":47.48,"STS22 (pl)":19.22,"STS22 (pl-en)":42.67,"STS22 (ru)":11.19,"STS22 (tr)":21.6,"STS22 (zh-en)":44.39,"STSBenchmark":83.09,"STSBenchmarkMultilingualSTS (cmn-Hans)":38.93,"STSBenchmarkMultilingualSTS (deu-Latn)":63.28,"STSBenchmarkMultilingualSTS (en)":83.09,"STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":66.68,"STSBenchmarkMultilingualSTS 
(ita-Latn)":60.71,"STSBenchmarkMultilingualSTS (nld-Latn)":60.03,"STSBenchmarkMultilingualSTS (pol-Latn)":60.2,"STSBenchmarkMultilingualSTS (por-Latn)":63.85,"STSBenchmarkMultilingualSTS (rus-Cyrl)":56.09,"STSBenchmarkMultilingualSTS (spa-Latn)":65.33} +{"Rank":69,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42,"STSBenchmarkMultilingualSTS (cmn-Hans)":39.43,"STSBenchmarkMultilingualSTS (deu-Latn)":61.43,"STSBenchmarkMultilingualSTS (en)":83.42,"STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":65.15,"STSBenchmarkMultilingualSTS (ita-Latn)":62.72,"STSBenchmarkMultilingualSTS (nld-Latn)":57.01,"STSBenchmarkMultilingualSTS (pol-Latn)":52.36,"STSBenchmarkMultilingualSTS (por-Latn)":62.12,"STSBenchmarkMultilingualSTS (rus-Cyrl)":55.54,"STSBenchmarkMultilingualSTS (spa-Latn)":65.78} +{"Rank":70,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":27.14,"STS17 (en-ar)":6.9,"STS17 (en-de)":11.59,"STS17 (en-tr)":6.46,"STS17 (es-en)":10.86,"STS17 (es-es)":55.45,"STS17 (fr-en)":16.02,"STS17 (it-en)":19.87,"STS17 (ko-ko)":8.08,"STS17 (nl-en)":24.92,"STS22 (ar)":19.57,"STS22 (de)":17.31,"STS22 (de-en)":26.03,"STS22 (de-fr)":10.26,"STS22 (de-pl)":16.94,"STS22 (es)":48.89,"STS22 (es-en)":51.79,"STS22 (es-it)":25.24,"STS22 (fr)":53.92,"STS22 (fr-pl)":39.44,"STS22 (it)":39.43,"STS22 (pl)":13.56,"STS22 (pl-en)":25.36,"STS22 (ru)":1.11,"STS22 (tr)":31.73,"STS22 (zh-en)":8.44,"STSBenchmark":61.26,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":71,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.54,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS 
(por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":72,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":13.78,"STS17 (en-ar)":9.08,"STS17 (en-de)":-3.11,"STS17 (en-tr)":-0.45,"STS17 (es-en)":-8.18,"STS17 (es-es)":48.23,"STS17 (fr-en)":5.81,"STS17 (it-en)":3.64,"STS17 (ko-ko)":2.54,"STS17 (nl-en)":0.44,"STS22 (ar)":32.42,"STS22 (de)":33.04,"STS22 (de-en)":28.65,"STS22 (de-fr)":14.77,"STS22 (de-pl)":11.21,"STS22 (es)":48.53,"STS22 (es-en)":26.97,"STS22 (es-it)":41.1,"STS22 (fr)":49.43,"STS22 (fr-pl)":39.44,"STS22 (it)":57.77,"STS22 (pl)":12.47,"STS22 (pl-en)":45.55,"STS22 (ru)":19.44,"STS22 (tr)":47.38,"STS22 (zh-en)":14.05,"STSBenchmark":61.55,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":73,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":77.34,"STS17 (en-ar)":77.46,"STS17 (en-de)":80.24,"STS17 (en-tr)":74.34,"STS17 (es-en)":77.4,"STS17 (es-es)":83.71,"STS17 (fr-en)":79.28,"STS17 (it-en)":80.82,"STS17 (ko-ko)":76.4,"STS17 (nl-en)":80.51,"STS22 (ar)":49.04,"STS22 (de)":35.73,"STS22 (de-en)":47.51,"STS22 (de-fr)":60.76,"STS22 (de-pl)":36.09,"STS22 (es)":59.34,"STS22 (es-en)":68.96,"STS22 (es-it)":63.28,"STS22 (fr)":76.41,"STS22 (fr-pl)":61.98,"STS22 (it)":65.1,"STS22 (pl)":34.58,"STS22 (pl-en)":71.33,"STS22 (ru)":52.4,"STS22 (tr)":54.07,"STS22 (zh-en)":61.75,"STSBenchmark":80.75,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":77.49,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":74,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.58,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} 
+{"Rank":75,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":10.19,"STS17 (en-ar)":-5.77,"STS17 (en-de)":67.43,"STS17 (en-tr)":8.75,"STS17 (es-en)":54.96,"STS17 (es-es)":82.74,"STS17 (fr-en)":60.5,"STS17 (it-en)":46.26,"STS17 (ko-ko)":8.96,"STS17 (nl-en)":47.48,"STS22 (ar)":34.97,"STS22 (de)":51.7,"STS22 (de-en)":48.76,"STS22 (de-fr)":57.5,"STS22 (de-pl)":32.76,"STS22 (es)":57.49,"STS22 (es-en)":67.76,"STS22 (es-it)":57.18,"STS22 (fr)":78.7,"STS22 (fr-pl)":61.98,"STS22 (it)":67.67,"STS22 (pl)":30.68,"STS22 (pl-en)":54.17,"STS22 (ru)":15.36,"STS22 (tr)":58.12,"STS22 (zh-en)":29.42,"STSBenchmark":77.6,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":76,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":9.06,"STS17 (en-ar)":-3.22,"STS17 (en-de)":70.38,"STS17 (en-tr)":17.17,"STS17 (es-en)":60.24,"STS17 (es-es)":81.93,"STS17 (fr-en)":62.17,"STS17 (it-en)":59.11,"STS17 (ko-ko)":8.9,"STS17 (nl-en)":56.91,"STS22 (ar)":37.66,"STS22 (de)":50.58,"STS22 (de-en)":53.63,"STS22 (de-fr)":55.72,"STS22 (de-pl)":27.99,"STS22 (es)":59.14,"STS22 (es-en)":69.99,"STS22 (es-it)":60.94,"STS22 (fr)":79.43,"STS22 (fr-pl)":61.98,"STS22 (it)":67.14,"STS22 (pl)":33.74,"STS22 (pl-en)":60.18,"STS22 (ru)":32.69,"STS22 (tr)":55.79,"STS22 (zh-en)":28.85,"STSBenchmark":77.65,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":77,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.73,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":78,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 
(ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.97,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":79,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.62,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":63.85,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":80,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":79.16,"STS17 (en-ar)":81.22,"STS17 (en-de)":84.22,"STS17 (en-tr)":76.74,"STS17 (es-en)":84.44,"STS17 (es-es)":85.56,"STS17 (fr-en)":76.59,"STS17 (it-en)":82.35,"STS17 (ko-ko)":77.03,"STS17 (nl-en)":81.71,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.55,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.42,"STSBenchmarkMultilingualSTS (cmn-Hans)":80.47,"STSBenchmarkMultilingualSTS (deu-Latn)":78.87,"STSBenchmarkMultilingualSTS (en)":84.42,"STSBenchmarkMultilingualSTS (fr)":79.9,"STSBenchmarkMultilingualSTS (fra-Latn)":79.9,"STSBenchmarkMultilingualSTS (ita-Latn)":80.39,"STSBenchmarkMultilingualSTS (nld-Latn)":79.54,"STSBenchmarkMultilingualSTS (pol-Latn)":78.29,"STSBenchmarkMultilingualSTS (por-Latn)":80.16,"STSBenchmarkMultilingualSTS (rus-Cyrl)":79.32,"STSBenchmarkMultilingualSTS (spa-Latn)":81.1} +{"Rank":81,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":79.1,"STS17 (en-ar)":80.85,"STS17 (en-de)":83.28,"STS17 (en-tr)":74.9,"STS17 (es-en)":86.11,"STS17 (es-es)":85.14,"STS17 (fr-en)":81.17,"STS17 
(it-en)":84.24,"STS17 (ko-ko)":83.41,"STS17 (nl-en)":82.51,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.3,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.64,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.82,"STSBenchmarkMultilingualSTS (cmn-Hans)":81.98,"STSBenchmarkMultilingualSTS (deu-Latn)":83.56,"STSBenchmarkMultilingualSTS (en)":86.82,"STSBenchmarkMultilingualSTS (fr)":84.69,"STSBenchmarkMultilingualSTS (fra-Latn)":84.69,"STSBenchmarkMultilingualSTS (ita-Latn)":84.09,"STSBenchmarkMultilingualSTS (nld-Latn)":83.36,"STSBenchmarkMultilingualSTS (pol-Latn)":81.46,"STSBenchmarkMultilingualSTS (por-Latn)":84.0,"STSBenchmarkMultilingualSTS (rus-Cyrl)":82.45,"STSBenchmarkMultilingualSTS (spa-Latn)":84.61} +{"Rank":82,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":13.36,"STS17 (en-ar)":-5.65,"STS17 (en-de)":67.11,"STS17 (en-tr)":-0.02,"STS17 (es-en)":47.72,"STS17 (es-es)":79.94,"STS17 (fr-en)":56.61,"STS17 (it-en)":30.46,"STS17 (ko-ko)":10.06,"STS17 (nl-en)":36.46,"STS22 (ar)":31.2,"STS22 (de)":42.08,"STS22 (de-en)":46.9,"STS22 (de-fr)":55.04,"STS22 (de-pl)":33.94,"STS22 (es)":53.81,"STS22 (es-en)":65.19,"STS22 (es-it)":55.29,"STS22 (fr)":77.69,"STS22 (fr-pl)":28.17,"STS22 (it)":60.65,"STS22 (pl)":24.42,"STS22 (pl-en)":42.97,"STS22 (ru)":12.13,"STS22 (tr)":40.45,"STS22 (zh-en)":20.15,"STSBenchmark":85.52,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":74.04,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":83,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":10.75,"STS17 (en-ar)":-4.71,"STS17 (en-de)":73.62,"STS17 (en-tr)":-0.42,"STS17 (es-en)":62.62,"STS17 (es-es)":82.74,"STS17 (fr-en)":67.86,"STS17 (it-en)":51.86,"STS17 (ko-ko)":9.44,"STS17 (nl-en)":45.95,"STS22 (ar)":27.01,"STS22 (de)":43.73,"STS22 (de-en)":49.93,"STS22 (de-fr)":61.58,"STS22 (de-pl)":38.83,"STS22 (es)":57.68,"STS22 (es-en)":68.09,"STS22 (es-it)":61.58,"STS22 (fr)":75.01,"STS22 (fr-pl)":5.63,"STS22 (it)":62.01,"STS22 (pl)":25.0,"STS22 (pl-en)":51.72,"STS22 (ru)":14.21,"STS22 (tr)":47.3,"STS22 (zh-en)":23.1,"STSBenchmark":85.36,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":77.59,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":84,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":11.13,"STS17 (en-ar)":-3.93,"STS17 (en-de)":79.04,"STS17 (en-tr)":13.61,"STS17 (es-en)":71.72,"STS17 (es-es)":83.42,"STS17 (fr-en)":71.38,"STS17 (it-en)":69.5,"STS17 (ko-ko)":9.61,"STS17 
(nl-en)":66.12,"STS22 (ar)":29.6,"STS22 (de)":47.72,"STS22 (de-en)":49.64,"STS22 (de-fr)":62.21,"STS22 (de-pl)":34.34,"STS22 (es)":58.16,"STS22 (es-en)":69.15,"STS22 (es-it)":65.26,"STS22 (fr)":77.49,"STS22 (fr-pl)":50.71,"STS22 (it)":66.91,"STS22 (pl)":27.04,"STS22 (pl-en)":58.85,"STS22 (ru)":26.63,"STS22 (tr)":43.36,"STS22 (zh-en)":29.0,"STSBenchmark":83.93,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":79.42,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":85,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":76.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.01,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":81.24,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":86,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":81.81,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":87,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 
(pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":78.12,"STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":88,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":83.48,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":89,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.91,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":75.48,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":90,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":71.11,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS 
(en)":"","STSBenchmarkMultilingualSTS (fr)":78.16,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":91,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.72,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":46.23,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":92,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.49,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":"","STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":42.32,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":93,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.78,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS 
(por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":94,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.32,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":95,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.02,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":96,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.08,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":97,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 
(en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.09,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":64.5,"STS22 (zh-en)":"","STSBenchmark":83.17,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":77.55,"STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":98,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.34,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":99,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.56,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":100,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 
(es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.24,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} +{"Rank":101,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.54,"STSBenchmarkMultilingualSTS (cmn-Hans)":"","STSBenchmarkMultilingualSTS (deu-Latn)":"","STSBenchmarkMultilingualSTS (en)":"","STSBenchmarkMultilingualSTS (fr)":"","STSBenchmarkMultilingualSTS (fra-Latn)":"","STSBenchmarkMultilingualSTS (ita-Latn)":"","STSBenchmarkMultilingualSTS (nld-Latn)":"","STSBenchmarkMultilingualSTS (pol-Latn)":"","STSBenchmarkMultilingualSTS (por-Latn)":"","STSBenchmarkMultilingualSTS (rus-Cyrl)":"","STSBenchmarkMultilingualSTS (spa-Latn)":""} diff --git a/boards_data/pl/data_overall/default.jsonl b/boards_data/pl/data_overall/default.jsonl index 041713a35621379d0b199e8fb6cde94dc2a5bb19..42469c40a747bcb12baa42f1f31f9f1850c11010 100644 --- a/boards_data/pl/data_overall/default.jsonl +++ b/boards_data/pl/data_overall/default.jsonl @@ -1,40 +1,23 @@ -{"index":2,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Embedding Dimensions":3584,"Max Tokens":32768,"Average (26 datasets)":67.86,"Classification Average (7 datasets)":77.84,"Clustering Average (1 datasets)":51.36,"PairClassification Average (4 datasets)":88.48,"Retrieval Average (11 datasets)":54.69,"STS Average (3 datasets)":70.86} -{"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":67.86,"Classification Average (7 datasets)":77.84,"Clustering Average (1 datasets)":51.36,"PairClassification Average (4 datasets)":88.48,"Retrieval Average (11 datasets)":54.69,"STS Average (3 datasets)":70.86} -{"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":67.86,"Classification Average (7 datasets)":77.84,"Clustering Average (1 datasets)":51.36,"PairClassification Average (4 datasets)":88.48,"Retrieval Average (11 datasets)":54.69,"STS Average (3 datasets)":70.86} -{"index":1,"Rank":4,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":64.04,"Classification Average (7 
datasets)":72.29,"Clustering Average (1 datasets)":44.59,"PairClassification Average (4 datasets)":84.87,"Retrieval Average (11 datasets)":51.88,"STS Average (3 datasets)":68.12} -{"index":33,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":64.04,"Classification Average (7 datasets)":72.29,"Clustering Average (1 datasets)":44.59,"PairClassification Average (4 datasets)":84.87,"Retrieval Average (11 datasets)":51.88,"STS Average (3 datasets)":68.12} -{"index":39,"Rank":6,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":63.23,"Classification Average (7 datasets)":66.39,"Clustering Average (1 datasets)":31.16,"PairClassification Average (4 datasets)":89.13,"Retrieval Average (11 datasets)":52.71,"STS Average (3 datasets)":70.59} -{"index":36,"Rank":7,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":61.17,"Classification Average (7 datasets)":61.07,"Clustering Average (1 datasets)":30.62,"PairClassification Average (4 datasets)":85.9,"Retrieval Average (11 datasets)":52.63,"STS Average (3 datasets)":69.98} -{"index":38,"Rank":8,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":61.05,"Classification Average (7 datasets)":62.92,"Clustering Average (1 datasets)":33.08,"PairClassification Average (4 datasets)":88.14,"Retrieval Average (11 datasets)":49.92,"STS Average (3 datasets)":70.7} -{"index":26,"Rank":9,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (26 datasets)":60.08,"Classification Average (7 datasets)":63.82,"Clustering Average (1 datasets)":33.88,"PairClassification Average (4 datasets)":85.5,"Retrieval Average (11 datasets)":48.98,"STS Average (3 datasets)":66.91} -{"index":35,"Rank":10,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":59.71,"Classification Average (7 datasets)":59.52,"Clustering Average (1 datasets)":30.25,"PairClassification Average (4 datasets)":86.16,"Retrieval Average (11 datasets)":50.06,"STS Average (3 datasets)":70.13} -{"index":19,"Rank":11,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":56.92,"Classification Average (7 datasets)":60.44,"Clustering Average (1 datasets)":32.85,"PairClassification Average (4 datasets)":87.92,"Retrieval Average (11 datasets)":42.19,"STS Average (3 datasets)":69.47} -{"index":37,"Rank":12,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":55.84,"Classification Average (7 datasets)":57.73,"Clustering Average (1 datasets)":31.77,"PairClassification Average (4 datasets)":84.61,"Retrieval Average (11 datasets)":42.83,"STS Average (3 datasets)":68.77} -{"index":25,"Rank":13,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding 
Dimensions":768,"Max Tokens":514,"Average (26 datasets)":55.62,"Classification Average (7 datasets)":59.01,"Clustering Average (1 datasets)":24.97,"PairClassification Average (4 datasets)":82.15,"Retrieval Average (11 datasets)":44.01,"STS Average (3 datasets)":65.13} -{"index":28,"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":54.14,"Classification Average (7 datasets)":57.05,"Clustering Average (1 datasets)":23.92,"PairClassification Average (4 datasets)":80.5,"Retrieval Average (11 datasets)":42.43,"STS Average (3 datasets)":65.18} -{"index":30,"Rank":15,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":52.75,"Classification Average (7 datasets)":58.99,"Clustering Average (1 datasets)":31.49,"PairClassification Average (4 datasets)":77.33,"Retrieval Average (11 datasets)":38.43,"STS Average (3 datasets)":65.01} -{"index":29,"Rank":16,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":52.52,"Classification Average (7 datasets)":59.26,"Clustering Average (1 datasets)":28.15,"PairClassification Average (4 datasets)":75.32,"Retrieval Average (11 datasets)":39.16,"STS Average (3 datasets)":63.53} -{"index":41,"Rank":17,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":52.48,"Classification Average (7 datasets)":57.49,"Clustering Average (1 datasets)":33.15,"PairClassification Average (4 datasets)":87.04,"Retrieval Average (11 datasets)":34.44,"STS Average (3 datasets)":67.36} -{"index":40,"Rank":18,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":51.88,"Classification Average (7 datasets)":58.55,"Clustering Average (1 datasets)":31.68,"PairClassification Average (4 datasets)":87.0,"Retrieval Average (11 datasets)":32.08,"STS Average (3 datasets)":68.78} -{"index":52,"Rank":19,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":48.67,"Classification Average (7 datasets)":54.09,"Clustering Average (1 datasets)":25.62,"PairClassification Average (4 datasets)":86.23,"Retrieval Average (11 datasets)":29.16,"STS Average (3 datasets)":65.19} -{"index":51,"Rank":20,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":46.4,"Classification Average (7 datasets)":52.18,"Clustering Average (1 datasets)":23.24,"PairClassification Average (4 datasets)":83.28,"Retrieval Average (11 datasets)":26.66,"STS Average (3 datasets)":63.83} -{"index":42,"Rank":21,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":45.36,"Classification Average (7 datasets)":57.39,"Clustering Average (1 datasets)":12.96,"PairClassification Average (4 datasets)":79.27,"Retrieval Average (11 
datasets)":23.36,"STS Average (3 datasets)":63.57} -{"index":48,"Rank":22,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (26 datasets)":42.3,"Classification Average (7 datasets)":49.92,"Clustering Average (1 datasets)":12.51,"PairClassification Average (4 datasets)":79.26,"Retrieval Average (11 datasets)":21.18,"STS Average (3 datasets)":62.59} -{"index":0,"Rank":23,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":3,"Rank":24,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (7 datasets)":77.99,"Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":89.62,"Retrieval Average (11 datasets)":59.41,"STS Average (3 datasets)":70.64} -{"index":4,"Rank":25,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (7 datasets)":37.66,"Clustering Average (1 datasets)":4.68,"PairClassification Average (4 datasets)":59.51,"Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":7,"Rank":28,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":23,"Rank":42,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (26 datasets)":"","Classification Average (7 datasets)":56.34,"Clustering Average (1 datasets)":18.79,"PairClassification Average (4 datasets)":73.2,"Retrieval Average (11 datasets)":"","STS Average (3 datasets)":58.02} -{"index":24,"Rank":43,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":31,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":32,"Rank":46,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 
datasets)":""} -{"index":43,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":44,"Rank":48,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":45,"Rank":49,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":46,"Rank":50,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":47,"Rank":51,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":49,"Rank":52,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":50,"Rank":53,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":53,"Rank":54,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":54,"Rank":55,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} -{"index":55,"Rank":56,"Model":"sentence-t5-xl<\/a>","Model Size (Million 
Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (26 datasets)":61.17,"Classification Average (7 datasets)":62.36,"Clustering Average (1 datasets)":33.88,"PairClassification Average (4 datasets)":84.55,"Retrieval Average (11 datasets)":47.32,"STS Average (3 datasets)":83.04} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":56.61,"Classification Average (7 datasets)":57.17,"Clustering Average (1 datasets)":24.97,"PairClassification Average (4 datasets)":81.31,"Retrieval Average (11 datasets)":42.3,"STS Average (3 datasets)":80.66} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":55.23,"Classification Average (7 datasets)":55.53,"Clustering Average (1 datasets)":23.92,"PairClassification Average (4 datasets)":79.61,"Retrieval Average (11 datasets)":41.06,"STS Average (3 datasets)":79.86} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":49.26,"Classification Average (7 datasets)":49.07,"Clustering Average (1 datasets)":25.62,"PairClassification Average (4 datasets)":85.17,"Retrieval Average (11 datasets)":28.51,"STS Average (3 datasets)":80.96} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":47.18,"Classification Average (7 datasets)":48.15,"Clustering Average (1 datasets)":23.24,"PairClassification Average (4 datasets)":82.06,"Retrieval Average (11 datasets)":26.21,"STS Average (3 datasets)":78.88} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":46.26,"Classification Average (7 datasets)":55.48,"Clustering Average (1 datasets)":12.96,"PairClassification Average (4 datasets)":78.17,"Retrieval Average (11 datasets)":22.73,"STS Average (3 datasets)":75.72} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, 
fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":28.15,"PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":31.49,"PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":31.68,"PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":33.15,"PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 
datasets)":"","Clustering Average (1 datasets)":12.51,"PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (26 datasets)":"","Classification Average (7 datasets)":"","Clustering Average (1 datasets)":"","PairClassification Average (4 datasets)":"","Retrieval Average (11 datasets)":"","STS Average (3 datasets)":""} diff --git a/boards_data/pl/data_tasks/Classification/default.jsonl b/boards_data/pl/data_tasks/Classification/default.jsonl index 12084ba7a4257707859a4f99d63fee89edbd428c..04b50a7f4e9e8f782683ab712b8b2666368f8315 100644 --- a/boards_data/pl/data_tasks/Classification/default.jsonl +++ b/boards_data/pl/data_tasks/Classification/default.jsonl @@ -1,53 +1,23 @@ -{"level_0":0,"index":3,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.99,"AllegroReviews":65.0,"CBD":84.13,"MassiveIntentClassification (pl)":79.41,"MassiveScenarioClassification (pl)":81.93,"PAC":67.24,"PolEmo2.0-IN":90.42,"PolEmo2.0-OUT":77.77} -{"level_0":1,"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.84,"AllegroReviews":67.14,"CBD":78.03,"MassiveIntentClassification (pl)":80.75,"MassiveScenarioClassification (pl)":85.54,"PAC":69.04,"PolEmo2.0-IN":89.39,"PolEmo2.0-OUT":74.98} -{"level_0":2,"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":77.84,"AllegroReviews":67.14,"CBD":78.03,"MassiveIntentClassification (pl)":80.75,"MassiveScenarioClassification (pl)":85.54,"PAC":69.04,"PolEmo2.0-IN":89.39,"PolEmo2.0-OUT":74.98} -{"level_0":3,"index":2,"Rank":4,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, 
fp32)":28.36,"Average":77.84,"AllegroReviews":67.14,"CBD":78.03,"MassiveIntentClassification (pl)":80.75,"MassiveScenarioClassification (pl)":85.54,"PAC":69.04,"PolEmo2.0-IN":89.39,"PolEmo2.0-OUT":74.98} -{"level_0":4,"index":1,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.29,"AllegroReviews":63.98,"CBD":68.56,"MassiveIntentClassification (pl)":73.55,"MassiveScenarioClassification (pl)":76.37,"PAC":69.04,"PolEmo2.0-IN":86.16,"PolEmo2.0-OUT":68.4} -{"level_0":5,"index":33,"Rank":6,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.29,"AllegroReviews":63.98,"CBD":68.56,"MassiveIntentClassification (pl)":73.55,"MassiveScenarioClassification (pl)":76.37,"PAC":69.04,"PolEmo2.0-IN":86.16,"PolEmo2.0-OUT":68.4} -{"level_0":6,"index":39,"Rank":7,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.39,"AllegroReviews":47.49,"CBD":69.33,"MassiveIntentClassification (pl)":74.81,"MassiveScenarioClassification (pl)":77.84,"PAC":64.69,"PolEmo2.0-IN":76.84,"PolEmo2.0-OUT":53.72} -{"level_0":7,"index":26,"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.82,"AllegroReviews":41.14,"CBD":69.9,"MassiveIntentClassification (pl)":65.07,"MassiveScenarioClassification (pl)":69.82,"PAC":70.37,"PolEmo2.0-IN":77.06,"PolEmo2.0-OUT":53.38} -{"level_0":8,"index":38,"Rank":9,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.92,"AllegroReviews":40.26,"CBD":68.11,"MassiveIntentClassification (pl)":72.31,"MassiveScenarioClassification (pl)":75.53,"PAC":65.87,"PolEmo2.0-IN":71.73,"PolEmo2.0-OUT":46.6} -{"level_0":9,"index":36,"Rank":10,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.07,"AllegroReviews":37.68,"CBD":66.15,"MassiveIntentClassification (pl)":72.01,"MassiveScenarioClassification (pl)":75.27,"PAC":63.77,"PolEmo2.0-IN":69.46,"PolEmo2.0-OUT":43.14} -{"level_0":10,"index":19,"Rank":11,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.44,"AllegroReviews":40.19,"CBD":67.69,"MassiveIntentClassification (pl)":68.2,"MassiveScenarioClassification (pl)":73.97,"PAC":66.55,"PolEmo2.0-IN":68.41,"PolEmo2.0-OUT":38.06} -{"level_0":11,"index":35,"Rank":12,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.52,"AllegroReviews":36.38,"CBD":65.54,"MassiveIntentClassification (pl)":70.93,"MassiveScenarioClassification (pl)":74.76,"PAC":64.5,"PolEmo2.0-IN":68.7,"PolEmo2.0-OUT":35.81} -{"level_0":12,"index":29,"Rank":13,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":59.26,"AllegroReviews":34.11,"CBD":68.35,"MassiveIntentClassification (pl)":65.53,"MassiveScenarioClassification (pl)":68.51,"PAC":68.4,"PolEmo2.0-IN":64.18,"PolEmo2.0-OUT":45.73} -{"level_0":13,"index":25,"Rank":14,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":59.01,"AllegroReviews":40.85,"CBD":62.66,"MassiveIntentClassification (pl)":61.04,"MassiveScenarioClassification (pl)":66.11,"PAC":70.87,"PolEmo2.0-IN":67.66,"PolEmo2.0-OUT":43.91} 
-{"level_0":14,"index":30,"Rank":15,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":58.99,"AllegroReviews":33.35,"CBD":68.51,"MassiveIntentClassification (pl)":66.63,"MassiveScenarioClassification (pl)":69.97,"PAC":66.26,"PolEmo2.0-IN":63.52,"PolEmo2.0-OUT":44.7} -{"level_0":15,"index":40,"Rank":16,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":58.55,"AllegroReviews":34.5,"CBD":70.27,"MassiveIntentClassification (pl)":64.81,"MassiveScenarioClassification (pl)":70.01,"PAC":64.6,"PolEmo2.0-IN":67.06,"PolEmo2.0-OUT":38.58} -{"level_0":16,"index":37,"Rank":17,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.73,"AllegroReviews":33.03,"CBD":64.24,"MassiveIntentClassification (pl)":67.35,"MassiveScenarioClassification (pl)":72.13,"PAC":63.11,"PolEmo2.0-IN":63.96,"PolEmo2.0-OUT":40.32} -{"level_0":17,"index":41,"Rank":18,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":57.49,"AllegroReviews":34.55,"CBD":67.48,"MassiveIntentClassification (pl)":65.93,"MassiveScenarioClassification (pl)":71.85,"PAC":63.25,"PolEmo2.0-IN":68.37,"PolEmo2.0-OUT":30.99} -{"level_0":18,"index":42,"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":57.39,"AllegroReviews":34.89,"CBD":65.71,"MassiveIntentClassification (pl)":59.71,"MassiveScenarioClassification (pl)":64.58,"PAC":68.11,"PolEmo2.0-IN":64.0,"PolEmo2.0-OUT":44.72} -{"level_0":19,"index":28,"Rank":20,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":57.05,"AllegroReviews":37.42,"CBD":63.25,"MassiveIntentClassification (pl)":57.4,"MassiveScenarioClassification (pl)":64.25,"PAC":70.55,"PolEmo2.0-IN":67.35,"PolEmo2.0-OUT":39.13} -{"level_0":20,"index":23,"Rank":21,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.34,"AllegroReviews":29.62,"CBD":63.83,"MassiveIntentClassification (pl)":65.86,"MassiveScenarioClassification (pl)":69.99,"PAC":73.87,"PolEmo2.0-IN":52.8,"PolEmo2.0-OUT":38.4} -{"level_0":21,"index":52,"Rank":22,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.09,"AllegroReviews":33.86,"CBD":65.0,"MassiveIntentClassification (pl)":64.29,"MassiveScenarioClassification (pl)":68.98,"PAC":63.76,"PolEmo2.0-IN":62.78,"PolEmo2.0-OUT":19.98} -{"level_0":22,"index":51,"Rank":23,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":52.18,"AllegroReviews":30.88,"CBD":57.68,"MassiveIntentClassification (pl)":59.43,"MassiveScenarioClassification (pl)":65.04,"PAC":65.76,"PolEmo2.0-IN":57.76,"PolEmo2.0-OUT":28.7} -{"level_0":23,"index":48,"Rank":24,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":49.92,"AllegroReviews":28.03,"CBD":60.0,"MassiveIntentClassification (pl)":53.1,"MassiveScenarioClassification (pl)":61.29,"PAC":68.17,"PolEmo2.0-IN":48.84,"PolEmo2.0-OUT":30.0} -{"level_0":24,"index":4,"Rank":25,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":37.66,"AllegroReviews":24.89,"CBD":53.78,"MassiveIntentClassification (pl)":31.77,"MassiveScenarioClassification (pl)":37.49,"PAC":57.14,"PolEmo2.0-IN":40.97,"PolEmo2.0-OUT":17.57} -{"level_0":25,"index":0,"Rank":26,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":44.99,"MassiveScenarioClassification (pl)":52.92,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":26,"index":5,"Rank":27,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":27.62,"MassiveScenarioClassification (pl)":31.6,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":27,"index":6,"Rank":28,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":33.17,"MassiveScenarioClassification (pl)":36.34,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":28,"index":8,"Rank":30,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.23,"CBD":0.5,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.56,"PolEmo2.0-IN":0.37,"PolEmo2.0-OUT":0.3} -{"level_0":29,"index":9,"Rank":31,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.21,"CBD":0.54,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.63,"PolEmo2.0-IN":0.34,"PolEmo2.0-OUT":0.28} -{"level_0":30,"index":10,"Rank":32,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.22,"CBD":0.51,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.55,"PolEmo2.0-IN":0.39,"PolEmo2.0-OUT":0.29} -{"level_0":31,"index":11,"Rank":33,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.26,"CBD":0.55,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.58,"PolEmo2.0-IN":0.36,"PolEmo2.0-OUT":0.29} -{"level_0":32,"index":12,"Rank":34,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.22,"CBD":0.52,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.55,"PolEmo2.0-IN":0.35,"PolEmo2.0-OUT":0.3} -{"level_0":33,"index":13,"Rank":35,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":0.27,"CBD":0.58,"MassiveIntentClassification (pl)":"","MassiveScenarioClassification (pl)":"","PAC":0.65,"PolEmo2.0-IN":0.53,"PolEmo2.0-OUT":0.23} -{"level_0":34,"index":14,"Rank":36,"Model":"2024-06-19_22-27-15<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":21.69,"MassiveScenarioClassification (pl)":26.17,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":35,"index":15,"Rank":37,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":38.21,"MassiveScenarioClassification 
(pl)":46.21,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":36,"index":17,"Rank":39,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":42.2,"MassiveScenarioClassification (pl)":41.89,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":37,"index":18,"Rank":40,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":42.62,"MassiveScenarioClassification (pl)":41.88,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":38,"index":20,"Rank":41,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":60.07,"MassiveScenarioClassification (pl)":64.0,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":39,"index":21,"Rank":42,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":50.48,"MassiveScenarioClassification (pl)":49.98,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":40,"index":24,"Rank":43,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":74.85,"MassiveScenarioClassification (pl)":77.37,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":41,"index":27,"Rank":44,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":73.65,"MassiveScenarioClassification (pl)":76.69,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":42,"index":31,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":44.47,"MassiveScenarioClassification (pl)":45.6,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":43,"index":32,"Rank":46,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":41.63,"MassiveScenarioClassification (pl)":41.63,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":44,"index":43,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":37.63,"MassiveScenarioClassification (pl)":44.72,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":45,"index":46,"Rank":50,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":35.77,"MassiveScenarioClassification (pl)":36.87,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":46,"index":49,"Rank":52,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":39.08,"MassiveScenarioClassification (pl)":46.79,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} 
-{"level_0":47,"index":50,"Rank":53,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":42.64,"MassiveScenarioClassification (pl)":49.97,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":48,"index":53,"Rank":54,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":34.41,"MassiveScenarioClassification (pl)":42.3,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":49,"index":54,"Rank":55,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":37.67,"MassiveScenarioClassification (pl)":45.2,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":50,"index":55,"Rank":56,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":43.95,"MassiveScenarioClassification (pl)":49.87,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":51,"index":56,"Rank":57,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":26.05,"MassiveScenarioClassification (pl)":30.15,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} -{"level_0":52,"index":57,"Rank":58,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AllegroReviews":"","CBD":"","MassiveIntentClassification (pl)":58.91,"MassiveScenarioClassification (pl)":62.55,"PAC":"","PolEmo2.0-IN":"","PolEmo2.0-OUT":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":62.36,"AllegroReviews":41.14,"AllegroReviews (pol-Latn)":41.04,"CBD":69.9,"CBD (pol-Latn)":69.84,"PAC":70.37,"PAC (pol-Latn)":70.33,"PolEmo2.0-IN":77.06,"PolEmo2.0-IN (pol-Latn)":77.06,"PolEmo2.0-OUT":53.38,"PolEmo2.0-OUT (pol-Latn)":53.48} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":57.17,"AllegroReviews":40.85,"AllegroReviews (pol-Latn)":40.78,"CBD":62.66,"CBD (pol-Latn)":62.6,"PAC":70.87,"PAC (pol-Latn)":70.87,"PolEmo2.0-IN":67.66,"PolEmo2.0-IN (pol-Latn)":67.59,"PolEmo2.0-OUT":43.91,"PolEmo2.0-OUT (pol-Latn)":43.93} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.53,"AllegroReviews":37.42,"AllegroReviews (pol-Latn)":37.33,"CBD":63.25,"CBD (pol-Latn)":63.33,"PAC":70.55,"PAC (pol-Latn)":70.48,"PolEmo2.0-IN":67.35,"PolEmo2.0-IN (pol-Latn)":67.31,"PolEmo2.0-OUT":39.13,"PolEmo2.0-OUT (pol-Latn)":39.17} +{"Rank":4,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":55.48,"AllegroReviews":34.89,"AllegroReviews (pol-Latn)":34.86,"CBD":65.71,"CBD (pol-Latn)":65.74,"PAC":68.11,"PAC (pol-Latn)":68.09,"PolEmo2.0-IN":64.0,"PolEmo2.0-IN (pol-Latn)":63.91,"PolEmo2.0-OUT":44.72,"PolEmo2.0-OUT (pol-Latn)":44.76} +{"Rank":5,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.07,"AllegroReviews":33.86,"AllegroReviews (pol-Latn)":33.89,"CBD":65.0,"CBD 
(pol-Latn)":64.97,"PAC":63.76,"PAC (pol-Latn)":63.76,"PolEmo2.0-IN":62.78,"PolEmo2.0-IN (pol-Latn)":62.74,"PolEmo2.0-OUT":19.98,"PolEmo2.0-OUT (pol-Latn)":19.92} +{"Rank":6,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":48.15,"AllegroReviews":30.88,"AllegroReviews (pol-Latn)":30.85,"CBD":57.68,"CBD (pol-Latn)":57.71,"PAC":65.76,"PAC (pol-Latn)":65.75,"PolEmo2.0-IN":57.76,"PolEmo2.0-IN (pol-Latn)":57.76,"PolEmo2.0-OUT":28.7,"PolEmo2.0-OUT (pol-Latn)":28.66} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":37.32,"CBD":null,"CBD (pol-Latn)":70.98,"PAC":null,"PAC (pol-Latn)":68.09,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":66.07,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":32.94} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":34.11,"AllegroReviews (pol-Latn)":null,"CBD":68.35,"CBD (pol-Latn)":null,"PAC":68.4,"PAC (pol-Latn)":null,"PolEmo2.0-IN":64.18,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":45.73,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":33.35,"AllegroReviews (pol-Latn)":null,"CBD":68.51,"CBD (pol-Latn)":null,"PAC":66.26,"PAC (pol-Latn)":null,"PolEmo2.0-IN":63.52,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":44.7,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":34.5,"AllegroReviews (pol-Latn)":null,"CBD":70.27,"CBD (pol-Latn)":null,"PAC":64.6,"PAC (pol-Latn)":null,"PolEmo2.0-IN":67.06,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":38.58,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"AllegroReviews":34.55,"AllegroReviews (pol-Latn)":null,"CBD":67.48,"CBD (pol-Latn)":null,"PAC":63.25,"PAC (pol-Latn)":null,"PolEmo2.0-IN":68.37,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":30.99,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":23.85,"CBD":null,"CBD (pol-Latn)":48.46,"PAC":null,"PAC (pol-Latn)":59.53,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":38.32,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":22.98} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":24.64,"CBD":null,"CBD (pol-Latn)":50.9,"PAC":null,"PAC (pol-Latn)":59.78,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":40.29,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":25.0} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":22.99,"CBD":null,"CBD (pol-Latn)":50.25,"PAC":null,"PAC (pol-Latn)":62.1,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":41.63,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":25.0} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AllegroReviews":28.03,"AllegroReviews (pol-Latn)":null,"CBD":60.0,"CBD (pol-Latn)":null,"PAC":68.17,"PAC (pol-Latn)":null,"PolEmo2.0-IN":48.84,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":30.0,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AllegroReviews":null,"AllegroReviews (pol-Latn)":null,"CBD":null,"CBD (pol-Latn)":null,"PAC":null,"PAC (pol-Latn)":null,"PolEmo2.0-IN":null,"PolEmo2.0-IN (pol-Latn)":null,"PolEmo2.0-OUT":null,"PolEmo2.0-OUT (pol-Latn)":null} diff --git a/boards_data/pl/data_tasks/Clustering/default.jsonl b/boards_data/pl/data_tasks/Clustering/default.jsonl index 417253a7926560359bb4cc009bcc42e13a7938fb..6d29299e612a33acb2c6393574dc679bba061880 100644 --- a/boards_data/pl/data_tasks/Clustering/default.jsonl +++ b/boards_data/pl/data_tasks/Clustering/default.jsonl @@ -1,24 +1,23 @@ -{"level_0":0,"index":2,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory 
Usage (GB, fp32)":28.36,"8TagsClustering":51.36} -{"level_0":1,"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":51.36} -{"level_0":2,"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":51.36} -{"level_0":3,"index":1,"Rank":4,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":44.59} -{"level_0":4,"index":33,"Rank":5,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":44.59} -{"level_0":5,"index":26,"Rank":6,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"8TagsClustering":33.88} -{"level_0":6,"index":41,"Rank":7,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":33.15} -{"level_0":7,"index":38,"Rank":8,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":33.08} -{"level_0":8,"index":19,"Rank":9,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":32.85} -{"level_0":9,"index":37,"Rank":10,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":31.77} -{"level_0":10,"index":40,"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.68} -{"level_0":11,"index":30,"Rank":12,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.49} -{"level_0":12,"index":39,"Rank":13,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":31.16} -{"level_0":13,"index":36,"Rank":14,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":30.62} -{"level_0":14,"index":35,"Rank":15,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":30.25} -{"level_0":15,"index":29,"Rank":16,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":28.15} -{"level_0":16,"index":52,"Rank":17,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":25.62} -{"level_0":17,"index":25,"Rank":18,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":24.97} -{"level_0":18,"index":28,"Rank":19,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.92} -{"level_0":19,"index":51,"Rank":20,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.24} -{"level_0":20,"index":23,"Rank":21,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":18.79} -{"level_0":21,"index":42,"Rank":22,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"8TagsClustering":12.96} -{"level_0":22,"index":48,"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"8TagsClustering":12.51} -{"level_0":23,"index":4,"Rank":24,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","8TagsClustering":4.68} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"8TagsClustering":33.88} +{"Rank":2,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":33.15} +{"Rank":3,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.68} +{"Rank":4,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":31.49} +{"Rank":5,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"8TagsClustering":28.15} +{"Rank":6,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":25.62} +{"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"8TagsClustering":24.97} +{"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.92} +{"Rank":9,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"8TagsClustering":23.24} +{"Rank":10,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"8TagsClustering":12.96} +{"Rank":11,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"8TagsClustering":12.51} +{"Rank":12,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"8TagsClustering":null} +{"Rank":13,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"8TagsClustering":null} +{"Rank":14,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"8TagsClustering":null} +{"Rank":15,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"8TagsClustering":null} +{"Rank":16,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"8TagsClustering":null} +{"Rank":17,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"8TagsClustering":null} +{"Rank":18,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"8TagsClustering":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"8TagsClustering":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"8TagsClustering":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"8TagsClustering":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"8TagsClustering":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million 
Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"8TagsClustering":null} diff --git a/boards_data/pl/data_tasks/PairClassification/default.jsonl b/boards_data/pl/data_tasks/PairClassification/default.jsonl index 8dc165a08b91539b7448bcecc8aceb987b70abca..f5cd8b5a3e42faa2c53f10489b18506947dad554 100644 --- a/boards_data/pl/data_tasks/PairClassification/default.jsonl +++ b/boards_data/pl/data_tasks/PairClassification/default.jsonl @@ -1,31 +1,23 @@ -{"level_0":0,"index":3,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.62,"CDSC-E":78.23,"PPC":95.43,"PSC":99.24,"SICK-E-PL":85.58} -{"level_0":1,"index":39,"Rank":2,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.13,"CDSC-E":79.87,"PPC":93.56,"PSC":98.63,"SICK-E-PL":84.47} -{"level_0":2,"index":2,"Rank":3,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":88.48,"CDSC-E":76.75,"PPC":94.1,"PSC":99.37,"SICK-E-PL":83.68} -{"level_0":3,"index":34,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.48,"CDSC-E":76.75,"PPC":94.1,"PSC":99.37,"SICK-E-PL":83.68} -{"level_0":4,"index":22,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.48,"CDSC-E":76.75,"PPC":94.1,"PSC":99.37,"SICK-E-PL":83.68} -{"level_0":5,"index":38,"Rank":6,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.14,"CDSC-E":79.1,"PPC":92.81,"PSC":98.61,"SICK-E-PL":82.04} -{"level_0":6,"index":19,"Rank":7,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.92,"CDSC-E":79.12,"PPC":92.65,"PSC":98.42,"SICK-E-PL":81.47} -{"level_0":7,"index":41,"Rank":8,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":87.04,"CDSC-E":75.06,"PPC":93.49,"PSC":99.05,"SICK-E-PL":80.56} -{"level_0":8,"index":40,"Rank":9,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":87.0,"CDSC-E":75.99,"PPC":93.29,"PSC":99.1,"SICK-E-PL":79.63} -{"level_0":9,"index":52,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":86.23,"CDSC-E":75.76,"PPC":93.67,"PSC":98.26,"SICK-E-PL":77.22} -{"level_0":10,"index":35,"Rank":11,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.16,"CDSC-E":77.27,"PPC":91.76,"PSC":99.15,"SICK-E-PL":76.45} -{"level_0":11,"index":36,"Rank":12,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.9,"CDSC-E":76.83,"PPC":91.09,"PSC":99.53,"SICK-E-PL":76.14} -{"level_0":12,"index":26,"Rank":13,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":85.5,"CDSC-E":74.47,"PPC":92.18,"PSC":99.39,"SICK-E-PL":75.96} -{"level_0":13,"index":1,"Rank":14,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.87,"CDSC-E":72.42,"PPC":91.47,"PSC":99.28,"SICK-E-PL":76.32} 
-{"level_0":14,"index":33,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.87,"CDSC-E":72.42,"PPC":91.47,"PSC":99.28,"SICK-E-PL":76.32} -{"level_0":15,"index":37,"Rank":16,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.61,"CDSC-E":76.68,"PPC":89.7,"PSC":98.86,"SICK-E-PL":73.21} -{"level_0":16,"index":51,"Rank":17,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":83.28,"CDSC-E":72.22,"PPC":91.8,"PSC":97.14,"SICK-E-PL":71.94} -{"level_0":17,"index":25,"Rank":18,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":82.15,"CDSC-E":72.67,"PPC":88.01,"PSC":99.14,"SICK-E-PL":68.77} -{"level_0":18,"index":28,"Rank":19,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":80.5,"CDSC-E":69.7,"PPC":86.72,"PSC":99.24,"SICK-E-PL":66.34} -{"level_0":19,"index":42,"Rank":20,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":79.27,"CDSC-E":68.91,"PPC":86.97,"PSC":97.42,"SICK-E-PL":63.77} -{"level_0":20,"index":48,"Rank":21,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":79.26,"CDSC-E":71.83,"PPC":86.83,"PSC":96.35,"SICK-E-PL":62.05} -{"level_0":21,"index":30,"Rank":22,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":77.33,"CDSC-E":67.35,"PPC":85.33,"PSC":98.46,"SICK-E-PL":58.19} -{"level_0":22,"index":29,"Rank":23,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":75.32,"CDSC-E":63.31,"PPC":84.18,"PSC":98.87,"SICK-E-PL":54.93} -{"level_0":23,"index":23,"Rank":24,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.2,"CDSC-E":59.97,"PPC":85.37,"PSC":91.98,"SICK-E-PL":55.48} -{"level_0":24,"index":4,"Rank":25,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.51,"CDSC-E":43.51,"PPC":70.19,"PSC":77.83,"SICK-E-PL":46.51} -{"level_0":25,"index":8,"Rank":30,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.43,"PPC":"","PSC":0.4,"SICK-E-PL":0.55} -{"level_0":26,"index":9,"Rank":31,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.37,"PPC":"","PSC":0.38,"SICK-E-PL":0.42} -{"level_0":27,"index":10,"Rank":32,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.46,"PPC":"","PSC":0.41,"SICK-E-PL":0.51} -{"level_0":28,"index":11,"Rank":33,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.44,"PPC":"","PSC":0.75,"SICK-E-PL":0.52} -{"level_0":29,"index":12,"Rank":34,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-E":0.41,"PPC":"","PSC":0.38,"SICK-E-PL":0.44} -{"level_0":30,"index":13,"Rank":35,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","CDSC-E":0.56,"PPC":"","PSC":0.85,"SICK-E-PL":0.61} +{"Rank":1,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":85.17,"CDSC-E":75.76,"CDSC-E (pol-Latn)":75.77,"PPC":93.67,"PSC":98.26,"PSC (pol-Latn)":98.26,"SICK-E-PL":77.22,"SICK-E-PL (pol-Latn)":77.22} +{"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":84.55,"CDSC-E":74.47,"CDSC-E (pol-Latn)":74.47,"PPC":92.18,"PSC":99.39,"PSC (pol-Latn)":99.4,"SICK-E-PL":75.96,"SICK-E-PL (pol-Latn)":75.95} +{"Rank":3,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":82.06,"CDSC-E":72.22,"CDSC-E (pol-Latn)":72.22,"PPC":91.8,"PSC":97.14,"PSC (pol-Latn)":97.14,"SICK-E-PL":71.94,"SICK-E-PL (pol-Latn)":71.94} +{"Rank":4,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":81.31,"CDSC-E":72.67,"CDSC-E (pol-Latn)":72.7,"PPC":88.01,"PSC":99.14,"PSC (pol-Latn)":99.14,"SICK-E-PL":68.77,"SICK-E-PL (pol-Latn)":68.76} +{"Rank":5,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":79.61,"CDSC-E":69.7,"CDSC-E (pol-Latn)":69.69,"PPC":86.72,"PSC":99.24,"PSC (pol-Latn)":99.23,"SICK-E-PL":66.34,"SICK-E-PL (pol-Latn)":66.35} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":78.17,"CDSC-E":68.91,"CDSC-E (pol-Latn)":68.92,"PPC":86.97,"PSC":97.42,"PSC (pol-Latn)":97.42,"SICK-E-PL":63.77,"SICK-E-PL (pol-Latn)":63.77} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":72.65,"PPC":null,"PSC":null,"PSC (pol-Latn)":99.43,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":75.98} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":63.31,"CDSC-E (pol-Latn)":null,"PPC":84.18,"PSC":98.87,"PSC (pol-Latn)":null,"SICK-E-PL":54.93,"SICK-E-PL (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":67.35,"CDSC-E (pol-Latn)":null,"PPC":85.33,"PSC":98.46,"PSC (pol-Latn)":null,"SICK-E-PL":58.19,"SICK-E-PL (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":75.99,"CDSC-E (pol-Latn)":null,"PPC":93.29,"PSC":99.1,"PSC (pol-Latn)":null,"SICK-E-PL":79.63,"SICK-E-PL (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-E":75.06,"CDSC-E (pol-Latn)":null,"PPC":93.49,"PSC":99.05,"PSC (pol-Latn)":null,"SICK-E-PL":80.56,"SICK-E-PL (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":49.04,"PPC":null,"PSC":null,"PSC (pol-Latn)":87.92,"SICK-E-PL":null,"SICK-E-PL 
(pol-Latn)":49.63} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":47.27,"PPC":null,"PSC":null,"PSC (pol-Latn)":81.87,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":47.32} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":45.37,"PPC":null,"PSC":null,"PSC (pol-Latn)":83.28,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":46.51} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-E":71.83,"CDSC-E (pol-Latn)":null,"PPC":86.83,"PSC":96.35,"PSC (pol-Latn)":null,"SICK-E-PL":62.05,"SICK-E-PL (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-E":null,"CDSC-E (pol-Latn)":null,"PPC":null,"PSC":null,"PSC (pol-Latn)":null,"SICK-E-PL":null,"SICK-E-PL (pol-Latn)":null} diff --git a/boards_data/pl/data_tasks/Retrieval/default.jsonl b/boards_data/pl/data_tasks/Retrieval/default.jsonl index 8cd921fad35049b61eccebb26aba30dcf78412e4..643df4391399f084fd0bb15780c91dff0b65fc75 100644 --- a/boards_data/pl/data_tasks/Retrieval/default.jsonl +++ b/boards_data/pl/data_tasks/Retrieval/default.jsonl @@ -1,24 +1,23 @@ -{"level_0":0,"index":3,"Rank":1,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.41,"ArguAna-PL":59.71,"DBPedia-PL":43.19,"FiQA-PL":46.12,"HotpotQA-PL":77.03,"MSMARCO-PL":72.69,"NFCorpus-PL":36.72,"NQ-PL":56.85,"Quora-PL":84.47,"SCIDOCS-PL":19.53,"SciFact-PL":74.43,"TRECCOVID-PL":82.75} -{"level_0":1,"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":54.69,"ArguAna-PL":59.87,"DBPedia-PL":41.3,"FiQA-PL":41.05,"HotpotQA-PL":67.61,"MSMARCO-PL":62.22,"NFCorpus-PL":30.89,"NQ-PL":48.45,"Quora-PL":83.21,"SCIDOCS-PL":16.08,"SciFact-PL":70.58,"TRECCOVID-PL":80.3} -{"level_0":2,"index":2,"Rank":3,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":54.69,"ArguAna-PL":59.87,"DBPedia-PL":41.3,"FiQA-PL":41.05,"HotpotQA-PL":67.61,"MSMARCO-PL":62.22,"NFCorpus-PL":30.89,"NQ-PL":48.45,"Quora-PL":83.21,"SCIDOCS-PL":16.08,"SciFact-PL":70.58,"TRECCOVID-PL":80.3} -{"level_0":3,"index":34,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.69,"ArguAna-PL":59.87,"DBPedia-PL":41.3,"FiQA-PL":41.05,"HotpotQA-PL":67.61,"MSMARCO-PL":62.22,"NFCorpus-PL":30.89,"NQ-PL":48.45,"Quora-PL":83.21,"SCIDOCS-PL":16.08,"SciFact-PL":70.58,"TRECCOVID-PL":80.3} -{"level_0":4,"index":39,"Rank":5,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.71,"ArguAna-PL":63.4,"DBPedia-PL":40.27,"FiQA-PL":40.89,"HotpotQA-PL":71.04,"MSMARCO-PL":36.63,"NFCorpus-PL":33.94,"NQ-PL":47.62,"Quora-PL":85.51,"SCIDOCS-PL":19.47,"SciFact-PL":70.23,"TRECCOVID-PL":70.81} -{"level_0":5,"index":36,"Rank":6,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.63,"ArguAna-PL":63.25,"DBPedia-PL":39.84,"FiQA-PL":39.9,"HotpotQA-PL":70.94,"MSMARCO-PL":36.47,"NFCorpus-PL":34.03,"NQ-PL":47.33,"Quora-PL":85.63,"SCIDOCS-PL":19.13,"SciFact-PL":71.21,"TRECCOVID-PL":71.18} -{"level_0":6,"index":33,"Rank":7,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.88,"ArguAna-PL":56.48,"DBPedia-PL":36.98,"FiQA-PL":31.61,"HotpotQA-PL":60.9,"MSMARCO-PL":63.98,"NFCorpus-PL":30.1,"NQ-PL":44.2,"Quora-PL":81.43,"SCIDOCS-PL":16.59,"SciFact-PL":67.17,"TRECCOVID-PL":81.19} -{"level_0":7,"index":1,"Rank":8,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.88,"ArguAna-PL":56.48,"DBPedia-PL":36.98,"FiQA-PL":31.61,"HotpotQA-PL":60.9,"MSMARCO-PL":63.98,"NFCorpus-PL":30.1,"NQ-PL":44.2,"Quora-PL":81.43,"SCIDOCS-PL":16.59,"SciFact-PL":67.17,"TRECCOVID-PL":81.19} -{"level_0":8,"index":35,"Rank":9,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.06,"ArguAna-PL":58.4,"DBPedia-PL":37.19,"FiQA-PL":34.53,"HotpotQA-PL":66.25,"MSMARCO-PL":32.54,"NFCorpus-PL":33.71,"NQ-PL":44.6,"Quora-PL":84.44,"SCIDOCS-PL":17.35,"SciFact-PL":68.29,"TRECCOVID-PL":73.33} -{"level_0":9,"index":38,"Rank":10,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.92,"ArguAna-PL":59.02,"DBPedia-PL":36.22,"FiQA-PL":35.01,"HotpotQA-PL":66.64,"MSMARCO-PL":33.05,"NFCorpus-PL":34.14,"NQ-PL":45.65,"Quora-PL":84.44,"SCIDOCS-PL":17.84,"SciFact-PL":65.75,"TRECCOVID-PL":71.33} -{"level_0":10,"index":26,"Rank":11,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.98,"ArguAna-PL":53.02,"DBPedia-PL":35.82,"FiQA-PL":33.0,"HotpotQA-PL":67.41,"MSMARCO-PL":33.38,"NFCorpus-PL":30.24,"NQ-PL":52.79,"Quora-PL":83.65,"SCIDOCS-PL":13.81,"SciFact-PL":65.66,"TRECCOVID-PL":70.03} -{"level_0":11,"index":25,"Rank":12,"Model":"multilingual-e5-base<\/a>","Model Size (Million 
Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.01,"ArguAna-PL":42.81,"DBPedia-PL":30.23,"FiQA-PL":25.52,"HotpotQA-PL":63.52,"MSMARCO-PL":29.52,"NFCorpus-PL":25.98,"NQ-PL":44.8,"Quora-PL":81.22,"SCIDOCS-PL":12.35,"SciFact-PL":62.11,"TRECCOVID-PL":66.06} -{"level_0":12,"index":37,"Rank":13,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.83,"ArguAna-PL":54.31,"DBPedia-PL":30.28,"FiQA-PL":29.75,"HotpotQA-PL":57.14,"MSMARCO-PL":25.94,"NFCorpus-PL":27.6,"NQ-PL":33.83,"Quora-PL":81.15,"SCIDOCS-PL":14.79,"SciFact-PL":58.14,"TRECCOVID-PL":58.2} -{"level_0":13,"index":28,"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":42.43,"ArguAna-PL":37.43,"DBPedia-PL":29.27,"FiQA-PL":22.03,"HotpotQA-PL":60.15,"MSMARCO-PL":26.94,"NFCorpus-PL":26.48,"NQ-PL":40.46,"Quora-PL":78.7,"SCIDOCS-PL":11.6,"SciFact-PL":62.76,"TRECCOVID-PL":70.92} -{"level_0":14,"index":19,"Rank":15,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.19,"ArguAna-PL":56.06,"DBPedia-PL":27.0,"FiQA-PL":24.73,"HotpotQA-PL":50.61,"MSMARCO-PL":43.25,"NFCorpus-PL":31.15,"NQ-PL":28.89,"Quora-PL":83.59,"SCIDOCS-PL":12.21,"SciFact-PL":57.73,"TRECCOVID-PL":48.83} -{"level_0":15,"index":29,"Rank":16,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":39.16,"ArguAna-PL":41.97,"DBPedia-PL":24.07,"FiQA-PL":24.25,"HotpotQA-PL":43.41,"MSMARCO-PL":51.56,"NFCorpus-PL":25.95,"NQ-PL":35.09,"Quora-PL":78.86,"SCIDOCS-PL":11.0,"SciFact-PL":51.92,"TRECCOVID-PL":42.64} -{"level_0":16,"index":30,"Rank":17,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":38.43,"ArguAna-PL":44.12,"DBPedia-PL":26.32,"FiQA-PL":24.95,"HotpotQA-PL":45.13,"MSMARCO-PL":25.47,"NFCorpus-PL":28.55,"NQ-PL":37.9,"Quora-PL":77.98,"SCIDOCS-PL":10.9,"SciFact-PL":54.44,"TRECCOVID-PL":46.98} -{"level_0":17,"index":41,"Rank":18,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":34.44,"ArguAna-PL":51.87,"DBPedia-PL":24.59,"FiQA-PL":22.27,"HotpotQA-PL":32.11,"MSMARCO-PL":17.91,"NFCorpus-PL":24.05,"NQ-PL":23.54,"Quora-PL":81.49,"SCIDOCS-PL":13.23,"SciFact-PL":52.51,"TRECCOVID-PL":35.23} -{"level_0":18,"index":40,"Rank":19,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":32.08,"ArguAna-PL":49.42,"DBPedia-PL":19.82,"FiQA-PL":19.58,"HotpotQA-PL":23.47,"MSMARCO-PL":16.51,"NFCorpus-PL":22.49,"NQ-PL":19.83,"Quora-PL":81.17,"SCIDOCS-PL":12.15,"SciFact-PL":49.49,"TRECCOVID-PL":38.97} -{"level_0":19,"index":52,"Rank":20,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":29.16,"ArguAna-PL":42.62,"DBPedia-PL":20.18,"FiQA-PL":14.68,"HotpotQA-PL":29.36,"MSMARCO-PL":12.45,"NFCorpus-PL":18.53,"NQ-PL":15.64,"Quora-PL":79.18,"SCIDOCS-PL":11.18,"SciFact-PL":41.53,"TRECCOVID-PL":35.38} -{"level_0":20,"index":51,"Rank":21,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":26.66,"ArguAna-PL":37.83,"DBPedia-PL":18.0,"FiQA-PL":12.49,"HotpotQA-PL":22.76,"MSMARCO-PL":10.39,"NFCorpus-PL":17.16,"NQ-PL":12.56,"Quora-PL":77.18,"SCIDOCS-PL":10.26,"SciFact-PL":40.24,"TRECCOVID-PL":34.38} -{"level_0":21,"index":42,"Rank":22,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":23.36,"ArguAna-PL":38.52,"DBPedia-PL":16.1,"FiQA-PL":7.63,"HotpotQA-PL":19.72,"MSMARCO-PL":7.22,"NFCorpus-PL":17.45,"NQ-PL":9.65,"Quora-PL":74.96,"SCIDOCS-PL":7.48,"SciFact-PL":39.79,"TRECCOVID-PL":18.45} -{"level_0":22,"index":48,"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":21.18,"ArguAna-PL":36.7,"DBPedia-PL":12.36,"FiQA-PL":8.02,"HotpotQA-PL":20.83,"MSMARCO-PL":4.57,"NFCorpus-PL":16.28,"NQ-PL":5.85,"Quora-PL":71.95,"SCIDOCS-PL":6.5,"SciFact-PL":33.03,"TRECCOVID-PL":16.91} -{"level_0":23,"index":23,"Rank":42,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ArguAna-PL":15.04,"DBPedia-PL":"","FiQA-PL":3.1,"HotpotQA-PL":"","MSMARCO-PL":"","NFCorpus-PL":4.6,"NQ-PL":"","Quora-PL":77.61,"SCIDOCS-PL":1.4,"SciFact-PL":32.24,"TRECCOVID-PL":24.53} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":47.32,"ArguAna-PL":53.02,"ArguAna-PL (pol-Latn)":52.99,"DBPedia-PL":35.82,"FiQA-PL":33.0,"FiQA-PL (pol-Latn)":32.97,"HotpotQA-PL":67.41,"MSMARCO-PL":33.38,"NFCorpus-PL":30.24,"NFCorpus-PL (pol-Latn)":30.21,"NQ-PL":52.79,"Quora-PL":83.65,"SCIDOCS-PL":13.81,"SCIDOCS-PL (pol-Latn)":13.82,"SciFact-PL":65.66,"SciFact-PL (pol-Latn)":65.66,"TRECCOVID-PL":70.03,"TRECCOVID-PL (pol-Latn)":69.9} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":42.3,"ArguAna-PL":42.81,"ArguAna-PL (pol-Latn)":42.86,"DBPedia-PL":30.23,"FiQA-PL":25.52,"FiQA-PL (pol-Latn)":25.59,"HotpotQA-PL":63.52,"MSMARCO-PL":29.52,"NFCorpus-PL":25.98,"NFCorpus-PL (pol-Latn)":25.99,"NQ-PL":44.8,"Quora-PL":81.22,"SCIDOCS-PL":12.35,"SCIDOCS-PL (pol-Latn)":12.36,"SciFact-PL":62.11,"SciFact-PL (pol-Latn)":62.26,"TRECCOVID-PL":66.06,"TRECCOVID-PL (pol-Latn)":65.94} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":41.06,"ArguAna-PL":37.43,"ArguAna-PL (pol-Latn)":37.49,"DBPedia-PL":29.27,"FiQA-PL":22.03,"FiQA-PL (pol-Latn)":22.02,"HotpotQA-PL":60.15,"MSMARCO-PL":26.94,"NFCorpus-PL":26.48,"NFCorpus-PL (pol-Latn)":26.5,"NQ-PL":40.46,"Quora-PL":78.7,"SCIDOCS-PL":11.6,"SCIDOCS-PL (pol-Latn)":11.59,"SciFact-PL":62.76,"SciFact-PL (pol-Latn)":62.76,"TRECCOVID-PL":70.92,"TRECCOVID-PL (pol-Latn)":70.92} +{"Rank":4,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":28.51,"ArguAna-PL":42.62,"ArguAna-PL (pol-Latn)":42.61,"DBPedia-PL":20.18,"FiQA-PL":14.68,"FiQA-PL (pol-Latn)":14.71,"HotpotQA-PL":29.36,"MSMARCO-PL":12.45,"NFCorpus-PL":18.53,"NFCorpus-PL (pol-Latn)":18.54,"NQ-PL":15.64,"Quora-PL":79.18,"SCIDOCS-PL":11.18,"SCIDOCS-PL (pol-Latn)":11.17,"SciFact-PL":41.53,"SciFact-PL (pol-Latn)":41.55,"TRECCOVID-PL":35.38,"TRECCOVID-PL (pol-Latn)":35.43} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":26.21,"ArguAna-PL":37.83,"ArguAna-PL 
(pol-Latn)":37.86,"DBPedia-PL":18.0,"FiQA-PL":12.49,"FiQA-PL (pol-Latn)":12.49,"HotpotQA-PL":22.76,"MSMARCO-PL":10.39,"NFCorpus-PL":17.16,"NFCorpus-PL (pol-Latn)":17.17,"NQ-PL":12.56,"Quora-PL":77.18,"SCIDOCS-PL":10.26,"SCIDOCS-PL (pol-Latn)":10.26,"SciFact-PL":40.24,"SciFact-PL (pol-Latn)":40.24,"TRECCOVID-PL":34.38,"TRECCOVID-PL (pol-Latn)":34.23} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":22.73,"ArguAna-PL":38.52,"ArguAna-PL (pol-Latn)":38.56,"DBPedia-PL":16.1,"FiQA-PL":7.63,"FiQA-PL (pol-Latn)":7.66,"HotpotQA-PL":19.72,"MSMARCO-PL":7.22,"NFCorpus-PL":17.45,"NFCorpus-PL (pol-Latn)":17.45,"NQ-PL":9.65,"Quora-PL":74.96,"SCIDOCS-PL":7.48,"SCIDOCS-PL (pol-Latn)":7.47,"SciFact-PL":39.79,"SciFact-PL (pol-Latn)":39.79,"TRECCOVID-PL":18.45,"TRECCOVID-PL (pol-Latn)":18.51} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":48.89,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":38.04,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":32.88,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":18.39,"SciFact-PL":null,"SciFact-PL (pol-Latn)":73.22,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":58.01} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":41.97,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":24.07,"FiQA-PL":24.25,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":43.41,"MSMARCO-PL":51.56,"NFCorpus-PL":25.95,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":35.09,"Quora-PL":78.86,"SCIDOCS-PL":11.0,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":51.92,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":42.64,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":44.12,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":26.32,"FiQA-PL":24.95,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":45.13,"MSMARCO-PL":25.47,"NFCorpus-PL":28.55,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":37.9,"Quora-PL":77.98,"SCIDOCS-PL":10.9,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":54.44,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":46.98,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":49.42,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":19.82,"FiQA-PL":19.58,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":23.47,"MSMARCO-PL":16.51,"NFCorpus-PL":22.49,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":19.83,"Quora-PL":81.17,"SCIDOCS-PL":12.15,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":49.49,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":38.97,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"ArguAna-PL":51.87,"ArguAna-PL 
(pol-Latn)":null,"DBPedia-PL":24.59,"FiQA-PL":22.27,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":32.11,"MSMARCO-PL":17.91,"NFCorpus-PL":24.05,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":23.54,"Quora-PL":81.49,"SCIDOCS-PL":13.23,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":52.51,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":35.23,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":13.4,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":5.82,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":15.43,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":5.34,"SciFact-PL":null,"SciFact-PL (pol-Latn)":22.48,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":16.52} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":11.5,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":2.29,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":10.62,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":3.75,"SciFact-PL":null,"SciFact-PL (pol-Latn)":16.14,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":8.66} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":14.72,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":3.6,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":8.77,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":4.02,"SciFact-PL":null,"SciFact-PL (pol-Latn)":13.31,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":12.12} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"ArguAna-PL":36.7,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":12.36,"FiQA-PL":8.02,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":20.83,"MSMARCO-PL":4.57,"NFCorpus-PL":16.28,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":5.85,"Quora-PL":71.95,"SCIDOCS-PL":6.5,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":33.03,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":16.91,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL 
(pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"ArguAna-PL":null,"ArguAna-PL (pol-Latn)":null,"DBPedia-PL":null,"FiQA-PL":null,"FiQA-PL (pol-Latn)":null,"HotpotQA-PL":null,"MSMARCO-PL":null,"NFCorpus-PL":null,"NFCorpus-PL (pol-Latn)":null,"NQ-PL":null,"Quora-PL":null,"SCIDOCS-PL":null,"SCIDOCS-PL (pol-Latn)":null,"SciFact-PL":null,"SciFact-PL (pol-Latn)":null,"TRECCOVID-PL":null,"TRECCOVID-PL (pol-Latn)":null} diff --git a/boards_data/pl/data_tasks/STS/default.jsonl b/boards_data/pl/data_tasks/STS/default.jsonl index b4d4df45e32e28c6255827b1f81f31d471fe480a..f4a3027693db50d2941d31560e89be8930cce1dc 100644 --- a/boards_data/pl/data_tasks/STS/default.jsonl +++ b/boards_data/pl/data_tasks/STS/default.jsonl @@ -1,51 +1,23 @@ -{"level_0":0,"index":2,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":70.86,"CDSC-R":92.66,"SICK-R-PL":78.89,"STS22 (pl)":41.02} -{"level_0":1,"index":22,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.86,"CDSC-R":92.66,"SICK-R-PL":78.89,"STS22 (pl)":41.02} -{"level_0":2,"index":34,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.86,"CDSC-R":92.66,"SICK-R-PL":78.89,"STS22 (pl)":41.02} -{"level_0":3,"index":38,"Rank":4,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.7,"CDSC-R":92.54,"SICK-R-PL":79.2,"STS22 (pl)":40.36} -{"level_0":4,"index":3,"Rank":5,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":70.64,"CDSC-R":90.97,"SICK-R-PL":78.16,"STS22 (pl)":42.79} -{"level_0":5,"index":39,"Rank":6,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.59,"CDSC-R":92.54,"SICK-R-PL":79.91,"STS22 (pl)":39.32} -{"level_0":6,"index":35,"Rank":7,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.13,"CDSC-R":93.26,"SICK-R-PL":76.77,"STS22 (pl)":40.36} -{"level_0":7,"index":36,"Rank":8,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.98,"CDSC-R":93.5,"SICK-R-PL":76.04,"STS22 (pl)":40.4} -{"level_0":8,"index":19,"Rank":9,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.47,"CDSC-R":92.13,"SICK-R-PL":79.51,"STS22 (pl)":36.78} -{"level_0":9,"index":40,"Rank":10,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":68.78,"CDSC-R":89.62,"SICK-R-PL":76.37,"STS22 (pl)":40.36} -{"level_0":10,"index":37,"Rank":11,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.77,"CDSC-R":91.98,"SICK-R-PL":73.71,"STS22 (pl)":40.63} -{"level_0":11,"index":1,"Rank":12,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.12,"CDSC-R":89.74,"SICK-R-PL":73.78,"STS22 (pl)":40.83} -{"level_0":12,"index":33,"Rank":13,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.12,"CDSC-R":89.74,"SICK-R-PL":73.78,"STS22 (pl)":40.83} -{"level_0":13,"index":41,"Rank":14,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":67.36,"CDSC-R":88.55,"SICK-R-PL":76.18,"STS22 (pl)":37.34} -{"level_0":14,"index":26,"Rank":15,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":66.91,"CDSC-R":91.0,"SICK-R-PL":75.08,"STS22 (pl)":34.66} -{"level_0":15,"index":52,"Rank":16,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.19,"CDSC-R":88.8,"SICK-R-PL":73.13,"STS22 (pl)":33.64} -{"level_0":16,"index":28,"Rank":17,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.18,"CDSC-R":90.27,"SICK-R-PL":69.46,"STS22 (pl)":35.8} -{"level_0":17,"index":25,"Rank":18,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.13,"CDSC-R":90.08,"SICK-R-PL":71.23,"STS22 (pl)":34.07} -{"level_0":18,"index":30,"Rank":19,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":65.01,"CDSC-R":89.09,"SICK-R-PL":67.26,"STS22 (pl)":38.69} -{"level_0":19,"index":51,"Rank":20,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":63.83,"CDSC-R":88.98,"SICK-R-PL":68.77,"STS22 (pl)":33.73} -{"level_0":20,"index":42,"Rank":21,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":63.57,"CDSC-R":85.53,"SICK-R-PL":65.9,"STS22 (pl)":39.28} -{"level_0":21,"index":29,"Rank":22,"Model":"herbert-base-retrieval-v2<\/a>","Model Size 
(Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":63.53,"CDSC-R":86.18,"SICK-R-PL":64.67,"STS22 (pl)":39.73} -{"level_0":22,"index":48,"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":62.59,"CDSC-R":87.67,"SICK-R-PL":65.53,"STS22 (pl)":34.58} -{"level_0":23,"index":23,"Rank":24,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.02,"CDSC-R":85.77,"SICK-R-PL":62.98,"STS22 (pl)":25.31} -{"level_0":24,"index":0,"Rank":25,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":18.34} -{"level_0":25,"index":8,"Rank":30,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.46,"SICK-R-PL":0.43,"STS22 (pl)":""} -{"level_0":26,"index":9,"Rank":31,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.41,"SICK-R-PL":0.32,"STS22 (pl)":""} -{"level_0":27,"index":10,"Rank":32,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.57,"SICK-R-PL":0.39,"STS22 (pl)":""} -{"level_0":28,"index":11,"Rank":33,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.62,"SICK-R-PL":0.42,"STS22 (pl)":""} -{"level_0":29,"index":12,"Rank":34,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.3,"SICK-R-PL":0.28,"STS22 (pl)":""} -{"level_0":30,"index":13,"Rank":35,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":0.65,"SICK-R-PL":0.55,"STS22 (pl)":""} -{"level_0":31,"index":15,"Rank":37,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":35.38} -{"level_0":32,"index":16,"Rank":38,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":36.37} -{"level_0":33,"index":17,"Rank":39,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":23.31} -{"level_0":34,"index":18,"Rank":40,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":14.91} -{"level_0":35,"index":20,"Rank":41,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":42.08} -{"level_0":36,"index":21,"Rank":42,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":31.13} -{"level_0":37,"index":24,"Rank":43,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":39.21} -{"level_0":38,"index":31,"Rank":45,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 
(pl)":27.98} -{"level_0":39,"index":32,"Rank":46,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":15.06} -{"level_0":40,"index":43,"Rank":47,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":19.22} -{"level_0":41,"index":44,"Rank":48,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":26.77} -{"level_0":42,"index":46,"Rank":50,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":13.56} -{"level_0":43,"index":47,"Rank":51,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":12.47} -{"level_0":44,"index":49,"Rank":52,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":30.68} -{"level_0":45,"index":50,"Rank":53,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":33.74} -{"level_0":46,"index":53,"Rank":54,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":24.42} -{"level_0":47,"index":54,"Rank":55,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":25.0} -{"level_0":48,"index":55,"Rank":56,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":27.04} -{"level_0":49,"index":56,"Rank":57,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":9.71} -{"level_0":50,"index":57,"Rank":58,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","CDSC-R":"","SICK-R-PL":"","STS22 (pl)":34.81} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":83.04,"CDSC-R":91.0,"CDSC-R (pol-Latn)":91.0,"SICK-R-PL":75.08,"SICK-R-PL (pol-Latn)":75.08} +{"Rank":2,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.96,"CDSC-R":88.8,"CDSC-R (pol-Latn)":88.8,"SICK-R-PL":73.13,"SICK-R-PL (pol-Latn)":73.13} +{"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.66,"CDSC-R":90.08,"CDSC-R (pol-Latn)":90.09,"SICK-R-PL":71.23,"SICK-R-PL (pol-Latn)":71.23} +{"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":79.86,"CDSC-R":90.27,"CDSC-R (pol-Latn)":90.27,"SICK-R-PL":69.46,"SICK-R-PL (pol-Latn)":69.45} +{"Rank":5,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":78.88,"CDSC-R":88.98,"CDSC-R (pol-Latn)":88.98,"SICK-R-PL":68.77,"SICK-R-PL (pol-Latn)":68.77} +{"Rank":6,"Model":"LaBSE<\/a>","Model Size (Million 
Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":75.72,"CDSC-R":85.53,"CDSC-R (pol-Latn)":85.53,"SICK-R-PL":65.9,"SICK-R-PL (pol-Latn)":65.9} +{"Rank":7,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":8,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":92.23,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":72.78} +{"Rank":9,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":86.18,"CDSC-R (pol-Latn)":null,"SICK-R-PL":64.67,"SICK-R-PL (pol-Latn)":null} +{"Rank":10,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":89.09,"CDSC-R (pol-Latn)":null,"SICK-R-PL":67.26,"SICK-R-PL (pol-Latn)":null} +{"Rank":11,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":89.62,"CDSC-R (pol-Latn)":null,"SICK-R-PL":76.37,"SICK-R-PL (pol-Latn)":null} +{"Rank":12,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":null,"CDSC-R":88.55,"CDSC-R (pol-Latn)":null,"SICK-R-PL":76.18,"SICK-R-PL (pol-Latn)":null} +{"Rank":13,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":82.5,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":54.26} +{"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":79.45,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":52.43} +{"Rank":15,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":77.04,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":50.2} +{"Rank":16,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":17,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":18,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CDSC-R":87.67,"CDSC-R (pol-Latn)":null,"SICK-R-PL":65.53,"SICK-R-PL (pol-Latn)":null} +{"Rank":19,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":20,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":21,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CDSC-R":null,"CDSC-R 
(pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} +{"Rank":23,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CDSC-R":null,"CDSC-R (pol-Latn)":null,"SICK-R-PL":null,"SICK-R-PL (pol-Latn)":null} diff --git a/boards_data/rar-b/data_tasks/Retrieval/default.jsonl b/boards_data/rar-b/data_tasks/Retrieval/default.jsonl index e3099ac6d72f5ac16cff6cb015187ddc01808101..f8e7b0c80b54ecdeb6458ec3951d80e63e33b160 100644 --- a/boards_data/rar-b/data_tasks/Retrieval/default.jsonl +++ b/boards_data/rar-b/data_tasks/Retrieval/default.jsonl @@ -1,30 +1,36 @@ -{"level_0":0,"index":13,"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74} -{"level_0":1,"index":26,"Rank":2,"Model":"text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92} -{"level_0":2,"index":12,"Rank":3,"Model":"GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12} -{"level_0":3,"index":27,"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11} -{"level_0":4,"index":16,"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21} -{"level_0":5,"index":17,"Rank":6,"Model":"e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18} -{"level_0":6,"index":10,"Rank":7,"Model":"Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02} -{"level_0":7,"index":29,"Rank":8,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53} -{"level_0":8,"index":11,"Rank":9,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01} -{"level_0":9,"index":25,"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65} -{"level_0":10,"index":28,"Rank":11,"Model":"text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49} -{"level_0":11,"index":7,"Rank":12,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72} -{"level_0":12,"index":6,"Rank":13,"Model":"bge-m3-instruct<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33} -{"level_0":13,"index":20,"Rank":14,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33} -{"level_0":14,"index":24,"Rank":15,"Model":"text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36} -{"level_0":15,"index":1,"Rank":16,"Model":"dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18} -{"level_0":16,"index":22,"Rank":17,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8} -{"level_0":17,"index":5,"Rank":18,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18} -{"level_0":18,"index":18,"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2} -{"level_0":19,"index":0,"Rank":20,"Model":"dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84} -{"level_0":20,"index":19,"Rank":21,"Model":"all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73} -{"level_0":21,"index":15,"Rank":22,"Model":"contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":15.86,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":14.42,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15} -{"level_0":22,"index":3,"Rank":23,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, 
fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76} -{"level_0":23,"index":4,"Rank":24,"Model":"bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31} -{"level_0":24,"index":9,"Rank":25,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28} -{"level_0":25,"index":21,"Rank":26,"Model":"all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66} -{"level_0":26,"index":2,"Rank":27,"Model":"bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27} -{"level_0":27,"index":8,"Rank":28,"Model":"bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35} -{"level_0":28,"index":14,"Rank":29,"Model":"contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3} -{"level_0":29,"index":23,"Rank":30,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","ARCChallenge":"","AlphaNLI":"","HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""} +{"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, 
fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74} +{"Rank":2,"Model":"text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92} +{"Rank":3,"Model":"GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12} +{"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11} +{"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21} +{"Rank":6,"Model":"e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18} +{"Rank":7,"Model":"Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02} +{"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":24.69,"ARCChallenge":10.83,"AlphaNLI":13.59,"HellaSwag":27.35,"PIQA":28.82,"Quail":4.85,"RARbCode":58.92,"RARbMath":67.32,"SIQA":5.36,"SpartQA":5.64,"TempReasonL1":1.14,"TempReasonL2Fact":42.97,"TempReasonL2Pure":2.05,"TempReasonL3Fact":38.22,"TempReasonL3Pure":8.31,"WinoGrande":54.99} +{"Rank":9,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53} +{"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01} +{"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":23.26,"ARCChallenge":9.61,"AlphaNLI":16.44,"HellaSwag":24.79,"PIQA":25.09,"Quail":3.52,"RARbCode":52.16,"RARbMath":65.35,"SIQA":3.72,"SpartQA":7.91,"TempReasonL1":0.72,"TempReasonL2Fact":38.76,"TempReasonL2Pure":1.63,"TempReasonL3Fact":35.85,"TempReasonL3Pure":7.11,"WinoGrande":56.18} +{"Rank":12,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65} +{"Rank":13,"Model":"text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49} +{"Rank":14,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72} +{"Rank":15,"Model":"bge-m3-instruct<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33} +{"Rank":16,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.04,"ARCChallenge":7.14,"AlphaNLI":13.0,"HellaSwag":23.73,"PIQA":21.08,"Quail":2.38,"RARbCode":46.96,"RARbMath":63.91,"SIQA":2.57,"SpartQA":5.43,"TempReasonL1":0.8,"TempReasonL2Fact":36.76,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.42,"TempReasonL3Pure":6.36,"WinoGrande":37.46} +{"Rank":17,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33} +{"Rank":18,"Model":"text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36} +{"Rank":19,"Model":"dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18} +{"Rank":20,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8} +{"Rank":21,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18} +{"Rank":22,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2} +{"Rank":23,"Model":"dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84} +{"Rank":24,"Model":"contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.09,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":17.73,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15} +{"Rank":25,"Model":"all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, 
fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73} +{"Rank":26,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76} +{"Rank":27,"Model":"bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31} +{"Rank":28,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28} +{"Rank":29,"Model":"all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66} +{"Rank":30,"Model":"bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27} +{"Rank":31,"Model":"bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35} +{"Rank":32,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":12.24,"ARCChallenge":7.19,"AlphaNLI":21.87,"HellaSwag":17.53,"PIQA":18.65,"Quail":2.98,"RARbCode":11.02,"RARbMath":30.93,"SIQA":1.21,"SpartQA":5.69,"TempReasonL1":1.94,"TempReasonL2Fact":5.34,"TempReasonL2Pure":0.33,"TempReasonL3Fact":6.79,"TempReasonL3Pure":3.19,"WinoGrande":49.01} +{"Rank":33,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, 
fp32)":0.44,"Average":11.55,"ARCChallenge":6.19,"AlphaNLI":20.89,"HellaSwag":16.98,"PIQA":15.79,"Quail":2.96,"RARbCode":8.48,"RARbMath":30.02,"SIQA":0.88,"SpartQA":4.94,"TempReasonL1":1.43,"TempReasonL2Fact":6.21,"TempReasonL2Pure":0.22,"TempReasonL3Fact":6.77,"TempReasonL3Pure":4.9,"WinoGrande":46.52} +{"Rank":34,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":9.31,"ARCChallenge":3.78,"AlphaNLI":13.11,"HellaSwag":5.59,"PIQA":6.53,"Quail":1.91,"RARbCode":2.31,"RARbMath":27.19,"SIQA":1.07,"SpartQA":1.56,"TempReasonL1":1.56,"TempReasonL2Fact":7.06,"TempReasonL2Pure":0.14,"TempReasonL3Fact":8.74,"TempReasonL3Pure":4.73,"WinoGrande":54.3} +{"Rank":35,"Model":"contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3} +{"Rank":36,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","ARCChallenge":3.85,"AlphaNLI":14.15,"HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""} diff --git a/boards_data/ru/data_overall/default.jsonl b/boards_data/ru/data_overall/default.jsonl index 74801e105e5d4722b4a8fab88180a72d9a320df6..4a5df167adf2a3f9129a75730eabf612b41d9c35 100644 --- a/boards_data/ru/data_overall/default.jsonl +++ b/boards_data/ru/data_overall/default.jsonl @@ -1,25 +1,25 @@ -{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (16 datasets)":67.64,"Classification Average (7 datasets)":64.57,"Clustering Average (3 datasets)":59.98,"PairClassification Average (1 datasets)":59.38,"Reranking Average (1 datasets)":74.61,"Retrieval Average (2 datasets)":77.96,"STS Average (2 datasets)":80.15} -{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":64.23,"Classification Average (7 datasets)":59.36,"Clustering Average (3 datasets)":53.61,"PairClassification Average (1 datasets)":64.99,"Reranking Average (1 datasets)":73.08,"Retrieval Average (2 datasets)":76.78,"STS Average (2 datasets)":79.85} -{"index":15,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":63.36,"Classification Average (7 datasets)":58.92,"Clustering Average (3 datasets)":52.55,"PairClassification Average (1 datasets)":58.4,"Reranking Average (1 datasets)":75.58,"Retrieval Average (2 datasets)":77.39,"STS Average (2 datasets)":77.48} -{"index":0,"Rank":4,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":62.69,"Classification Average (7 datasets)":57.43,"Clustering Average (3 datasets)":52.51,"PairClassification Average (1 datasets)":60.6,"Reranking Average (1 datasets)":74.02,"Retrieval Average (2 datasets)":77.1,"STS Average (2 datasets)":77.39} 
-{"index":10,"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":61.26,"Classification Average (7 datasets)":57.86,"Clustering Average (3 datasets)":53.42,"PairClassification Average (1 datasets)":60.02,"Reranking Average (1 datasets)":64.42,"Retrieval Average (2 datasets)":67.34,"STS Average (2 datasets)":77.91} -{"index":23,"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":60.74,"Classification Average (7 datasets)":56.55,"Clustering Average (3 datasets)":53.22,"PairClassification Average (1 datasets)":57.81,"Reranking Average (1 datasets)":68.65,"Retrieval Average (2 datasets)":67.54,"STS Average (2 datasets)":77.37} -{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":60.04,"Classification Average (7 datasets)":56.19,"Clustering Average (3 datasets)":50.27,"PairClassification Average (1 datasets)":54.96,"Reranking Average (1 datasets)":72.01,"Retrieval Average (2 datasets)":69.91,"STS Average (2 datasets)":74.9} -{"index":16,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":59.64,"Classification Average (7 datasets)":55.09,"Clustering Average (3 datasets)":51.65,"PairClassification Average (1 datasets)":55.14,"Reranking Average (1 datasets)":71.46,"Retrieval Average (2 datasets)":69.27,"STS Average (2 datasets)":74.27} -{"index":24,"Rank":9,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (16 datasets)":55.92,"Classification Average (7 datasets)":53.46,"Clustering Average (3 datasets)":49.57,"PairClassification Average (1 datasets)":56.09,"Reranking Average (1 datasets)":62.15,"Retrieval Average (2 datasets)":51.5,"STS Average (2 datasets)":75.32} -{"index":22,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":55.42,"Classification Average (7 datasets)":54.11,"Clustering Average (3 datasets)":49.18,"PairClassification Average (1 datasets)":64.57,"Reranking Average (1 datasets)":58.77,"Retrieval Average (2 datasets)":44.4,"STS Average (2 datasets)":74.1} -{"index":5,"Rank":11,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":52.61,"Classification Average (7 datasets)":55.44,"Clustering Average (3 datasets)":52.65,"PairClassification Average (1 datasets)":51.97,"Reranking Average (1 datasets)":56.13,"Retrieval Average (2 datasets)":25.6,"STS Average (2 datasets)":68.19} -{"index":17,"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":52.02,"Classification Average (7 datasets)":52.35,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":55.71,"Reranking Average (1 datasets)":55.13,"Retrieval Average (2 datasets)":36.38,"STS Average (2 datasets)":69.54} 
-{"index":21,"Rank":13,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":51.89,"Classification Average (7 datasets)":51.38,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":58.56,"Reranking Average (1 datasets)":52.8,"Retrieval Average (2 datasets)":37.26,"STS Average (2 datasets)":70.71} -{"index":7,"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":51.44,"Classification Average (7 datasets)":52.73,"Clustering Average (3 datasets)":46.84,"PairClassification Average (1 datasets)":55.61,"Reranking Average (1 datasets)":54.83,"Retrieval Average (2 datasets)":31.88,"STS Average (2 datasets)":69.6} -{"index":6,"Rank":15,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (16 datasets)":48.98,"Classification Average (7 datasets)":55.21,"Clustering Average (3 datasets)":51.94,"PairClassification Average (1 datasets)":50.17,"Reranking Average (1 datasets)":46.81,"Retrieval Average (2 datasets)":11.78,"STS Average (2 datasets)":60.44} -{"index":9,"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.37,"Clustering Average (3 datasets)":41.23,"PairClassification Average (1 datasets)":51.87,"Reranking Average (1 datasets)":46.09,"Retrieval Average (2 datasets)":12.4,"STS Average (2 datasets)":67.28} -{"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.49,"Clustering Average (3 datasets)":43.13,"PairClassification Average (1 datasets)":59.12,"Reranking Average (1 datasets)":39.89,"Retrieval Average (2 datasets)":9.68,"STS Average (2 datasets)":66.13} -{"index":12,"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":45.23,"Classification Average (7 datasets)":54.23,"Clustering Average (3 datasets)":42.92,"PairClassification Average (1 datasets)":53.78,"Reranking Average (1 datasets)":34.01,"Retrieval Average (2 datasets)":7.5,"STS Average (2 datasets)":56.25} -{"index":1,"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":44.18,"Classification Average (7 datasets)":52.16,"Clustering Average (3 datasets)":38.41,"PairClassification Average (1 datasets)":52.48,"Reranking Average (1 datasets)":42.58,"Retrieval Average (2 datasets)":7.37,"STS Average (2 datasets)":58.36} -{"index":2,"Rank":20,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":40.68,"Classification Average (7 datasets)":50.66,"Clustering Average (3 datasets)":27.91,"PairClassification Average (1 datasets)":52.12,"Reranking Average (1 datasets)":41.65,"Retrieval Average (2 datasets)":7.55,"STS Average (2 
datasets)":51.84} -{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (16 datasets)":37.07,"Classification Average (7 datasets)":42.68,"Clustering Average (3 datasets)":30.76,"PairClassification Average (1 datasets)":51.06,"Reranking Average (1 datasets)":35.44,"Retrieval Average (2 datasets)":2.02,"STS Average (2 datasets)":55.78} -{"index":19,"Rank":22,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":25.97,"Classification Average (7 datasets)":28.67,"Clustering Average (3 datasets)":13.3,"PairClassification Average (1 datasets)":45.03,"Reranking Average (1 datasets)":27.05,"Retrieval Average (2 datasets)":1.66,"STS Average (2 datasets)":49.74} -{"index":4,"Rank":23,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (16 datasets)":"","Classification Average (7 datasets)":58.03,"Clustering Average (3 datasets)":61.91,"PairClassification Average (1 datasets)":59.39,"Reranking Average (1 datasets)":72.41,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":73.67} -{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":"","Classification Average (7 datasets)":28.33,"Clustering Average (3 datasets)":13.87,"PairClassification Average (1 datasets)":46.4,"Reranking Average (1 datasets)":38.51,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":50.9} -{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":"","Classification Average (7 datasets)":29.53,"Clustering Average (3 datasets)":15.83,"PairClassification Average (1 datasets)":44.52,"Reranking Average (1 datasets)":30.96,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":48.92} +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (16 datasets)":67.64,"Classification Average (7 datasets)":64.57,"Clustering Average (3 datasets)":59.98,"PairClassification Average (1 datasets)":59.38,"Reranking Average (1 datasets)":74.61,"Retrieval Average (2 datasets)":77.96,"STS Average (2 datasets)":80.15} +{"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":64.23,"Classification Average (7 datasets)":59.36,"Clustering Average (3 datasets)":53.61,"PairClassification Average (1 datasets)":64.99,"Reranking Average (1 datasets)":73.08,"Retrieval Average (2 datasets)":76.78,"STS Average (2 datasets)":79.85} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":63.36,"Classification Average (7 datasets)":58.92,"Clustering Average (3 datasets)":52.55,"PairClassification Average (1 datasets)":58.4,"Reranking Average (1 datasets)":75.58,"Retrieval Average (2 datasets)":77.39,"STS Average (2 datasets)":77.48} +{"Rank":4,"Model":"bge-m3<\/a>","Model Size (Million 
Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":62.69,"Classification Average (7 datasets)":57.43,"Clustering Average (3 datasets)":52.51,"PairClassification Average (1 datasets)":60.6,"Reranking Average (1 datasets)":74.02,"Retrieval Average (2 datasets)":77.1,"STS Average (2 datasets)":77.39} +{"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":61.26,"Classification Average (7 datasets)":57.86,"Clustering Average (3 datasets)":53.42,"PairClassification Average (1 datasets)":60.02,"Reranking Average (1 datasets)":64.42,"Retrieval Average (2 datasets)":67.34,"STS Average (2 datasets)":77.91} +{"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":60.74,"Classification Average (7 datasets)":56.55,"Clustering Average (3 datasets)":53.22,"PairClassification Average (1 datasets)":57.81,"Reranking Average (1 datasets)":68.65,"Retrieval Average (2 datasets)":67.54,"STS Average (2 datasets)":77.37} +{"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":60.04,"Classification Average (7 datasets)":56.19,"Clustering Average (3 datasets)":50.27,"PairClassification Average (1 datasets)":54.96,"Reranking Average (1 datasets)":72.01,"Retrieval Average (2 datasets)":69.91,"STS Average (2 datasets)":74.9} +{"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":59.64,"Classification Average (7 datasets)":55.09,"Clustering Average (3 datasets)":51.65,"PairClassification Average (1 datasets)":55.14,"Reranking Average (1 datasets)":71.46,"Retrieval Average (2 datasets)":69.27,"STS Average (2 datasets)":74.27} +{"Rank":9,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (16 datasets)":55.92,"Classification Average (7 datasets)":53.46,"Clustering Average (3 datasets)":49.57,"PairClassification Average (1 datasets)":56.09,"Reranking Average (1 datasets)":62.15,"Retrieval Average (2 datasets)":51.5,"STS Average (2 datasets)":75.32} +{"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":55.42,"Classification Average (7 datasets)":54.11,"Clustering Average (3 datasets)":49.18,"PairClassification Average (1 datasets)":64.57,"Reranking Average (1 datasets)":58.77,"Retrieval Average (2 datasets)":44.4,"STS Average (2 datasets)":74.1} +{"Rank":11,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":52.61,"Classification Average (7 datasets)":55.44,"Clustering Average (3 datasets)":52.65,"PairClassification Average (1 datasets)":51.97,"Reranking Average (1 datasets)":56.13,"Retrieval Average (2 datasets)":25.6,"STS Average (2 datasets)":68.19} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max 
Tokens":512,"Average (16 datasets)":52.02,"Classification Average (7 datasets)":52.35,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":55.71,"Reranking Average (1 datasets)":55.13,"Retrieval Average (2 datasets)":36.38,"STS Average (2 datasets)":69.54} +{"Rank":13,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":51.89,"Classification Average (7 datasets)":51.38,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":58.56,"Reranking Average (1 datasets)":52.8,"Retrieval Average (2 datasets)":37.26,"STS Average (2 datasets)":70.71} +{"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":51.44,"Classification Average (7 datasets)":52.73,"Clustering Average (3 datasets)":46.84,"PairClassification Average (1 datasets)":55.61,"Reranking Average (1 datasets)":54.83,"Retrieval Average (2 datasets)":31.88,"STS Average (2 datasets)":69.6} +{"Rank":15,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (16 datasets)":48.98,"Classification Average (7 datasets)":55.21,"Clustering Average (3 datasets)":51.94,"PairClassification Average (1 datasets)":50.17,"Reranking Average (1 datasets)":46.81,"Retrieval Average (2 datasets)":11.78,"STS Average (2 datasets)":60.44} +{"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.37,"Clustering Average (3 datasets)":41.23,"PairClassification Average (1 datasets)":51.87,"Reranking Average (1 datasets)":46.09,"Retrieval Average (2 datasets)":12.4,"STS Average (2 datasets)":67.28} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.49,"Clustering Average (3 datasets)":43.13,"PairClassification Average (1 datasets)":59.12,"Reranking Average (1 datasets)":39.89,"Retrieval Average (2 datasets)":9.68,"STS Average (2 datasets)":66.13} +{"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":45.23,"Classification Average (7 datasets)":54.23,"Clustering Average (3 datasets)":42.92,"PairClassification Average (1 datasets)":53.78,"Reranking Average (1 datasets)":34.01,"Retrieval Average (2 datasets)":7.5,"STS Average (2 datasets)":56.25} +{"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":44.18,"Classification Average (7 datasets)":52.16,"Clustering Average (3 datasets)":38.41,"PairClassification Average (1 datasets)":52.48,"Reranking Average (1 datasets)":42.58,"Retrieval Average (2 datasets)":7.37,"STS Average (2 datasets)":58.36} +{"Rank":20,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":40.68,"Classification Average 
(7 datasets)":50.66,"Clustering Average (3 datasets)":27.91,"PairClassification Average (1 datasets)":52.12,"Reranking Average (1 datasets)":41.65,"Retrieval Average (2 datasets)":7.55,"STS Average (2 datasets)":51.84} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (16 datasets)":37.07,"Classification Average (7 datasets)":42.68,"Clustering Average (3 datasets)":30.76,"PairClassification Average (1 datasets)":51.06,"Reranking Average (1 datasets)":35.44,"Retrieval Average (2 datasets)":2.02,"STS Average (2 datasets)":55.78} +{"Rank":22,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":25.97,"Classification Average (7 datasets)":28.67,"Clustering Average (3 datasets)":13.3,"PairClassification Average (1 datasets)":45.03,"Reranking Average (1 datasets)":27.05,"Retrieval Average (2 datasets)":1.66,"STS Average (2 datasets)":49.74} +{"Rank":23,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (16 datasets)":"","Classification Average (7 datasets)":58.03,"Clustering Average (3 datasets)":61.91,"PairClassification Average (1 datasets)":59.39,"Reranking Average (1 datasets)":72.41,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":73.67} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":"","Classification Average (7 datasets)":28.33,"Clustering Average (3 datasets)":13.87,"PairClassification Average (1 datasets)":46.4,"Reranking Average (1 datasets)":38.51,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":50.9} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":"","Classification Average (7 datasets)":29.53,"Clustering Average (3 datasets)":15.83,"PairClassification Average (1 datasets)":44.52,"Reranking Average (1 datasets)":30.96,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":48.92} diff --git a/boards_data/ru/data_tasks/Classification/default.jsonl b/boards_data/ru/data_tasks/Classification/default.jsonl index 879d8af17c912e750b8a6683d0a25e7fe0f19db1..7524e444fbdcb67ff3fe4e71d3324975575bb76a 100644 --- a/boards_data/ru/data_tasks/Classification/default.jsonl +++ b/boards_data/ru/data_tasks/Classification/default.jsonl @@ -1,25 +1,25 @@ -{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13} -{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification 
(rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2} -{"index":15,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91} -{"index":4,"Rank":4,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":58.03,"GeoreviewClassification (rus-Cyrl)":45.72,"HeadlineClassification (rus-Cyrl)":78.05,"InappropriatenessClassification (rus-Cyrl)":60.11,"KinopoiskClassification (rus-Cyrl)":56.14,"RuReviewsClassification (rus-Cyrl)":61.42,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.93,"RuSciBenchOECDClassification (rus-Cyrl)":45.83} -{"index":10,"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28} -{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57} -{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58} -{"index":14,"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69} -{"index":5,"Rank":9,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8} -{"index":6,"Rank":10,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification 
(rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04} -{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72} -{"index":12,"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34} -{"index":22,"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14} -{"index":24,"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79} -{"index":7,"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36} -{"index":17,"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48} -{"index":1,"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65} -{"index":3,"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million 
Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11} -{"index":21,"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41} -{"index":9,"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48} -{"index":2,"Rank":21,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13} -{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51} -{"index":20,"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62} -{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3} -{"index":18,"Rank":25,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification 
(rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31} +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13} +{"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2} +{"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91} +{"Rank":4,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":58.03,"GeoreviewClassification (rus-Cyrl)":45.72,"HeadlineClassification (rus-Cyrl)":78.05,"InappropriatenessClassification (rus-Cyrl)":60.11,"KinopoiskClassification (rus-Cyrl)":56.14,"RuReviewsClassification (rus-Cyrl)":61.42,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.93,"RuSciBenchOECDClassification (rus-Cyrl)":45.83} +{"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28} +{"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57} +{"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58} +{"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification 
(rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69} +{"Rank":9,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8} +{"Rank":10,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04} +{"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72} +{"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34} +{"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14} +{"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79} +{"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36} +{"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification 
(rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48} +{"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65} +{"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11} +{"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41} +{"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48} +{"Rank":21,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13} +{"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51} +{"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62} +{"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification 
(rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3} +{"Rank":25,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31} diff --git a/boards_data/ru/data_tasks/Clustering/default.jsonl b/boards_data/ru/data_tasks/Clustering/default.jsonl index 744caacf00658c025795e3ac934aa99a9faf851b..f653fe2da5ae1a56e6b9e7e468dbe3bc1906bb32 100644 --- a/boards_data/ru/data_tasks/Clustering/default.jsonl +++ b/boards_data/ru/data_tasks/Clustering/default.jsonl @@ -1,25 +1,25 @@ -{"index":4,"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":61.91,"GeoreviewClusteringP2P (rus-Cyrl)":74.06,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":60.01,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":51.66} -{"index":13,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":59.98,"GeoreviewClusteringP2P (rus-Cyrl)":65.68,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":61.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":52.72} -{"index":11,"Rank":3,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":53.61,"GeoreviewClusteringP2P (rus-Cyrl)":62.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":53.11,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.93} -{"index":10,"Rank":4,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.42,"GeoreviewClusteringP2P (rus-Cyrl)":64.16,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.38,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.73} -{"index":23,"Rank":5,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":53.22,"GeoreviewClusteringP2P (rus-Cyrl)":64.55,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.64,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.48} -{"index":5,"Rank":6,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":52.65,"GeoreviewClusteringP2P (rus-Cyrl)":58.45,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":52.2,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":47.29} -{"index":15,"Rank":7,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":52.55,"GeoreviewClusteringP2P (rus-Cyrl)":60.51,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":52.03,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":45.11} -{"index":0,"Rank":8,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":52.51,"GeoreviewClusteringP2P (rus-Cyrl)":63.75,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.57,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":43.21} -{"index":6,"Rank":9,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":51.94,"GeoreviewClusteringP2P (rus-Cyrl)":59.02,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.4,"RuSciBenchOECDClusteringP2P 
(rus-Cyrl)":46.41} -{"index":16,"Rank":10,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.65,"GeoreviewClusteringP2P (rus-Cyrl)":58.57,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.1,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":45.29} -{"index":14,"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78} -{"index":24,"Rank":12,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44} -{"index":22,"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9} -{"index":21,"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68} -{"index":17,"Rank":15,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97} -{"index":7,"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16} -{"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28} -{"index":12,"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31} -{"index":9,"Rank":19,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":41.23,"GeoreviewClusteringP2P (rus-Cyrl)":44.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":41.41,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":38.09} -{"index":1,"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":38.41,"GeoreviewClusteringP2P (rus-Cyrl)":43.26,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":37.84,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":34.12} -{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":30.76,"GeoreviewClusteringP2P (rus-Cyrl)":34.4,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":29.89,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":27.98} -{"index":2,"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, 
fp32)":0.67,"Average":27.91,"GeoreviewClusteringP2P (rus-Cyrl)":28.77,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":28.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":26.67} -{"index":20,"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":15.83,"GeoreviewClusteringP2P (rus-Cyrl)":20.33,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":14.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":12.49} -{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":13.87,"GeoreviewClusteringP2P (rus-Cyrl)":20.76,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":10.65,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":10.19} -{"index":19,"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":13.3,"GeoreviewClusteringP2P (rus-Cyrl)":20.25,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":10.21,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":9.43} +{"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":61.91,"GeoreviewClusteringP2P (rus-Cyrl)":74.06,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":60.01,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":51.66} +{"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":59.98,"GeoreviewClusteringP2P (rus-Cyrl)":65.68,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":61.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":52.72} +{"Rank":3,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":53.61,"GeoreviewClusteringP2P (rus-Cyrl)":62.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":53.11,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.93} +{"Rank":4,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.42,"GeoreviewClusteringP2P (rus-Cyrl)":64.16,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.38,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.73} +{"Rank":5,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":53.22,"GeoreviewClusteringP2P (rus-Cyrl)":64.55,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.64,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.48} +{"Rank":6,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":52.65,"GeoreviewClusteringP2P (rus-Cyrl)":58.45,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":52.2,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":47.29} +{"Rank":7,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":52.55,"GeoreviewClusteringP2P (rus-Cyrl)":60.51,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":52.03,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":45.11} +{"Rank":8,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":52.51,"GeoreviewClusteringP2P (rus-Cyrl)":63.75,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.57,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":43.21} +{"Rank":9,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":51.94,"GeoreviewClusteringP2P (rus-Cyrl)":59.02,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":50.4,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":46.41} +{"Rank":10,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.65,"GeoreviewClusteringP2P 
(rus-Cyrl)":58.57,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.1,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":45.29} +{"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78} +{"Rank":12,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44} +{"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9} +{"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68} +{"Rank":15,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97} +{"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28} +{"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31} +{"Rank":19,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":41.23,"GeoreviewClusteringP2P (rus-Cyrl)":44.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":41.41,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":38.09} +{"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":38.41,"GeoreviewClusteringP2P (rus-Cyrl)":43.26,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":37.84,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":34.12} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":30.76,"GeoreviewClusteringP2P (rus-Cyrl)":34.4,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":29.89,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":27.98} +{"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":27.91,"GeoreviewClusteringP2P (rus-Cyrl)":28.77,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":28.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":26.67} +{"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":15.83,"GeoreviewClusteringP2P 
(rus-Cyrl)":20.33,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":14.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":12.49} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":13.87,"GeoreviewClusteringP2P (rus-Cyrl)":20.76,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":10.65,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":10.19} +{"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":13.3,"GeoreviewClusteringP2P (rus-Cyrl)":20.25,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":10.21,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":9.43} diff --git a/boards_data/ru/data_tasks/PairClassification/default.jsonl b/boards_data/ru/data_tasks/PairClassification/default.jsonl index bde37e6f80e6c5dad05f6fdf190aff89ceece039..5f9799c41b32368435c181e147cc8d104986e655 100644 --- a/boards_data/ru/data_tasks/PairClassification/default.jsonl +++ b/boards_data/ru/data_tasks/PairClassification/default.jsonl @@ -1,25 +1,25 @@ -{"index":11,"Rank":1,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"TERRa (rus-Cyrl)":64.99} -{"index":22,"Rank":2,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"TERRa (rus-Cyrl)":64.57} -{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"TERRa (rus-Cyrl)":60.6} -{"index":10,"Rank":4,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"TERRa (rus-Cyrl)":60.02} -{"index":4,"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"TERRa (rus-Cyrl)":59.39} -{"index":13,"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"TERRa (rus-Cyrl)":59.38} -{"index":3,"Rank":7,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"TERRa (rus-Cyrl)":59.12} -{"index":21,"Rank":8,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"TERRa (rus-Cyrl)":58.56} -{"index":15,"Rank":9,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"TERRa (rus-Cyrl)":58.4} -{"index":23,"Rank":10,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"TERRa (rus-Cyrl)":57.81} -{"index":24,"Rank":11,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"TERRa (rus-Cyrl)":56.09} -{"index":17,"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"TERRa (rus-Cyrl)":55.71} -{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"TERRa (rus-Cyrl)":55.61} -{"index":16,"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"TERRa (rus-Cyrl)":55.14} -{"index":14,"Rank":15,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"TERRa (rus-Cyrl)":54.96} -{"index":12,"Rank":16,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"TERRa (rus-Cyrl)":53.78} -{"index":1,"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million 
Parameters)":106,"Memory Usage (GB, fp32)":0.39,"TERRa (rus-Cyrl)":52.48} -{"index":2,"Rank":18,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"TERRa (rus-Cyrl)":52.12} -{"index":5,"Rank":19,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"TERRa (rus-Cyrl)":51.97} -{"index":9,"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"TERRa (rus-Cyrl)":51.87} -{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"TERRa (rus-Cyrl)":51.06} -{"index":6,"Rank":22,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"TERRa (rus-Cyrl)":50.17} -{"index":18,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"TERRa (rus-Cyrl)":46.4} -{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"TERRa (rus-Cyrl)":45.03} -{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"TERRa (rus-Cyrl)":44.52} +{"Rank":1,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"TERRa (rus-Cyrl)":64.99} +{"Rank":2,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"TERRa (rus-Cyrl)":64.57} +{"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"TERRa (rus-Cyrl)":60.6} +{"Rank":4,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"TERRa (rus-Cyrl)":60.02} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"TERRa (rus-Cyrl)":59.39} +{"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"TERRa (rus-Cyrl)":59.38} +{"Rank":7,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"TERRa (rus-Cyrl)":59.12} +{"Rank":8,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"TERRa (rus-Cyrl)":58.56} +{"Rank":9,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"TERRa (rus-Cyrl)":58.4} +{"Rank":10,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"TERRa (rus-Cyrl)":57.81} +{"Rank":11,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"TERRa (rus-Cyrl)":56.09} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"TERRa (rus-Cyrl)":55.71} +{"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"TERRa (rus-Cyrl)":55.61} +{"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"TERRa (rus-Cyrl)":55.14} +{"Rank":15,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"TERRa (rus-Cyrl)":54.96} +{"Rank":16,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"TERRa (rus-Cyrl)":53.78} 
+{"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"TERRa (rus-Cyrl)":52.48} +{"Rank":18,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"TERRa (rus-Cyrl)":52.12} +{"Rank":19,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"TERRa (rus-Cyrl)":51.97} +{"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"TERRa (rus-Cyrl)":51.87} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"TERRa (rus-Cyrl)":51.06} +{"Rank":22,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"TERRa (rus-Cyrl)":50.17} +{"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"TERRa (rus-Cyrl)":46.4} +{"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"TERRa (rus-Cyrl)":45.03} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"TERRa (rus-Cyrl)":44.52} diff --git a/boards_data/ru/data_tasks/Reranking/default.jsonl b/boards_data/ru/data_tasks/Reranking/default.jsonl index be28c100234812ff524b86a3f20f44cb8d8fc785..14e74d4f7124aa81c3b912bd0d4e8ab325e295c6 100644 --- a/boards_data/ru/data_tasks/Reranking/default.jsonl +++ b/boards_data/ru/data_tasks/Reranking/default.jsonl @@ -1,25 +1,25 @@ -{"index":15,"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58} -{"index":13,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61} -{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"RuBQReranking (rus-Cyrl)":74.02} -{"index":11,"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08} -{"index":4,"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"RuBQReranking (rus-Cyrl)":72.41} -{"index":14,"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01} -{"index":16,"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46} -{"index":23,"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65} -{"index":10,"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42} -{"index":24,"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15} -{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77} -{"index":5,"Rank":12,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13} 
-{"index":17,"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13} -{"index":7,"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83} -{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8} -{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81} -{"index":9,"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09} -{"index":1,"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58} -{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65} -{"index":3,"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89} -{"index":18,"Rank":21,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51} -{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44} -{"index":12,"Rank":23,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01} -{"index":20,"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96} -{"index":19,"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking (rus-Cyrl)":27.05} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58} +{"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61} +{"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"RuBQReranking (rus-Cyrl)":74.02} +{"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08} +{"Rank":5,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"RuBQReranking (rus-Cyrl)":72.41} +{"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01} +{"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46} +{"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65} +{"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42} +{"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15} 
+{"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77} +{"Rank":12,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13} +{"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13} +{"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83} +{"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8} +{"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81} +{"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09} +{"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58} +{"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65} +{"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89} +{"Rank":21,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51} +{"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44} +{"Rank":23,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01} +{"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96} +{"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking (rus-Cyrl)":27.05} diff --git a/boards_data/ru/data_tasks/Retrieval/default.jsonl b/boards_data/ru/data_tasks/Retrieval/default.jsonl index d4072b4f21bcd7118944bfac08ddfd1477d00b95..6b9538f6ec61f4d6f6b7e6481e66faa065c32b7d 100644 --- a/boards_data/ru/data_tasks/Retrieval/default.jsonl +++ b/boards_data/ru/data_tasks/Retrieval/default.jsonl @@ -1,25 +1,25 @@ -{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98} -{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11} -{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21} -{"index":11,"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03} -{"index":14,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58} -{"index":16,"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53} -{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71} -{"index":10,"Rank":8,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86} -{"index":24,"Rank":9,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73} -{"index":22,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04} -{"index":21,"Rank":11,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7} -{"index":17,"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02} -{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03} -{"index":5,"Rank":14,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8} -{"index":9,"Rank":15,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87} -{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45} -{"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63} -{"index":2,"Rank":18,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52} -{"index":12,"Rank":19,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15} -{"index":1,"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6} -{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24} -{"index":19,"Rank":22,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million 
Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64} -{"index":4,"Rank":23,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":70.94} -{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84} -{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75} +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98} +{"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11} +{"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21} +{"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03} +{"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58} +{"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53} +{"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71} +{"Rank":8,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86} +{"Rank":9,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73} +{"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04} +{"Rank":11,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7} +{"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02} +{"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03} +{"Rank":14,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8} 
+{"Rank":15,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87} +{"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63} +{"Rank":18,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52} +{"Rank":19,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15} +{"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24} +{"Rank":22,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64} +{"Rank":23,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"RiaNewsRetrieval (rus-Cyrl)":null,"RuBQRetrieval (rus-Cyrl)":70.94} +{"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"RiaNewsRetrieval (rus-Cyrl)":null,"RuBQRetrieval (rus-Cyrl)":8.84} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"RiaNewsRetrieval (rus-Cyrl)":null,"RuBQRetrieval (rus-Cyrl)":4.75} diff --git a/boards_data/ru/data_tasks/STS/default.jsonl b/boards_data/ru/data_tasks/STS/default.jsonl index aabf9c6a22cf9688bd1d4059a8b184405769101b..d65bb348a98b52f6c0ee3f05ee08e5797e02f1a0 100644 --- a/boards_data/ru/data_tasks/STS/default.jsonl +++ b/boards_data/ru/data_tasks/STS/default.jsonl @@ -1,25 +1,25 @@ -{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13} -{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35} -{"index":10,"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26} -{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15} -{"index":0,"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87} 
-{"index":23,"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77} -{"index":24,"Rank":7,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48} -{"index":14,"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64} -{"index":16,"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08} -{"index":22,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46} -{"index":4,"Rank":11,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":73.67,"RUParaPhraserSTS (rus-Cyrl)":71.08,"RuSTSBenchmarkSTS (rus-Cyrl)":76.26} -{"index":21,"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55} -{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32} -{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34} -{"index":5,"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22} -{"index":9,"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43} -{"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03} -{"index":6,"Rank":18,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82} -{"index":1,"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72} -{"index":12,"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47} -{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16} -{"index":2,"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million 
Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95} -{"index":18,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33} -{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56} -{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68} +{"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13} +{"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35} +{"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26} +{"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15} +{"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87} +{"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77} +{"Rank":7,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48} +{"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64} +{"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08} +{"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46} +{"Rank":11,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":73.67,"RUParaPhraserSTS (rus-Cyrl)":71.08,"RuSTSBenchmarkSTS (rus-Cyrl)":76.26} +{"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55} +{"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32} +{"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, 
fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34} +{"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22} +{"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43} +{"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03} +{"Rank":18,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82} +{"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72} +{"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47} +{"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16} +{"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95} +{"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33} +{"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56} +{"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68} diff --git a/boards_data/se/data_tasks/Classification/default.jsonl b/boards_data/se/data_tasks/Classification/default.jsonl index a6fb06469994e12257dfe9ba3013bdee3b121ceb..d2efe60de93c56eeca8f8b7fc5b011495104b438 100644 --- a/boards_data/se/data_tasks/Classification/default.jsonl +++ b/boards_data/se/data_tasks/Classification/default.jsonl @@ -1,47 +1,32 @@ -{"level_0":0,"index":13,"Rank":1,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97} -{"level_0":1,"index":12,"Rank":2,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25} -{"level_0":2,"index":24,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, 
fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} -{"level_0":3,"index":30,"Rank":4,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19} -{"level_0":4,"index":23,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} -{"level_0":5,"index":17,"Rank":6,"Model":"dfm-sentence-encoder-large-1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":61.34,"MassiveIntentClassification (nb)":57.57,"MassiveScenarioClassification (nb)":63.66,"NoRecClassification":50.46,"NordicLangClassification":75.98,"NorwegianParliament":57.66,"ScalaNbClassification":62.69} -{"level_0":6,"index":31,"Rank":7,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79} -{"level_0":7,"index":26,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} -{"level_0":8,"index":16,"Rank":9,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95} -{"level_0":9,"index":45,"Rank":10,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99} -{"level_0":10,"index":18,"Rank":11,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18} -{"level_0":11,"index":46,"Rank":12,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33} -{"level_0":12,"index":19,"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18} -{"level_0":13,"index":20,"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13} -{"level_0":14,"index":8,"Rank":15,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34} -{"level_0":15,"index":22,"Rank":16,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03} -{"level_0":16,"index":6,"Rank":17,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63} -{"level_0":17,"index":34,"Rank":18,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} -{"level_0":18,"index":29,"Rank":19,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28} -{"level_0":19,"index":7,"Rank":20,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41} -{"level_0":20,"index":0,"Rank":21,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":21,"index":1,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""} -{"level_0":22,"index":2,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":23,"index":3,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification (nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":24,"index":4,"Rank":25,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":25,"index":5,"Rank":26,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":26,"index":9,"Rank":27,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":27,"index":10,"Rank":28,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":28,"index":11,"Rank":29,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":29,"index":14,"Rank":30,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification (nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":30,"index":15,"Rank":31,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":31,"index":21,"Rank":32,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":32,"index":25,"Rank":33,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification 
(nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":33,"index":27,"Rank":34,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":34,"index":28,"Rank":35,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":35,"index":32,"Rank":36,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":36,"index":33,"Rank":37,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":37,"index":35,"Rank":38,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":38,"index":36,"Rank":39,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":39,"index":37,"Rank":40,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":40,"index":38,"Rank":41,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":41,"index":39,"Rank":42,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":42,"index":40,"Rank":43,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} 
-{"level_0":43,"index":41,"Rank":44,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":44,"index":42,"Rank":45,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":45,"index":43,"Rank":46,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} -{"level_0":46,"index":44,"Rank":47,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":65.06,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NoRecClassification (nob-Latn)":58.43,"NordicLangClassification":82.29,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":80.15,"NorwegianParliament":60.36,"ScalaNbClassification":50.44} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":62.42,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NoRecClassification (nob-Latn)":53.74,"NordicLangClassification":75.94,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":75.85,"NorwegianParliament":59.94,"ScalaNbClassification":50.32} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.43,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NoRecClassification (nob-Latn)":50.08,"NordicLangClassification":75.15,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":72.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06} +{"Rank":4,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.04,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NoRecClassification (nob-Latn)":37.93,"NordicLangClassification":54.71,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.7,"NorwegianParliament":54.8,"ScalaNbClassification":50.17} +{"Rank":5,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification 
(nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":6,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":52.05,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":63.6,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":7,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NoRecClassification (nob-Latn)":"","NordicLangClassification":62.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.56,"ScalaNbClassification":53.63} +{"Rank":8,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NoRecClassification (nob-Latn)":"","NordicLangClassification":44.53,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":52.44,"ScalaNbClassification":52.41} +{"Rank":9,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NoRecClassification (nob-Latn)":"","NordicLangClassification":51.45,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":55.74,"ScalaNbClassification":50.34} +{"Rank":10,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.69,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.41,"ScalaNbClassification":62.25} +{"Rank":11,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":85.27,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":62.58,"ScalaNbClassification":66.97} +{"Rank":12,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":"","MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NoRecClassification (nob-Latn)":"","NordicLangClassification":77.68,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.78,"ScalaNbClassification":58.95} +{"Rank":13,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NoRecClassification 
(nob-Latn)":"","NordicLangClassification":59.34,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.42,"ScalaNbClassification":50.18} +{"Rank":14,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NoRecClassification (nob-Latn)":"","NordicLangClassification":58.3,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":57.26,"ScalaNbClassification":50.13} +{"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NoRecClassification (nob-Latn)":"","NordicLangClassification":53.47,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.57,"ScalaNbClassification":50.03} +{"Rank":16,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NoRecClassification (nob-Latn)":"","NordicLangClassification":57.82,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":53.25,"ScalaNbClassification":75.28} +{"Rank":17,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":"","MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NoRecClassification (nob-Latn)":"","NordicLangClassification":82.67,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":59.33,"ScalaNbClassification":60.19} +{"Rank":18,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":"","MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NoRecClassification (nob-Latn)":"","NordicLangClassification":84.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":58.85,"ScalaNbClassification":66.79} +{"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NoRecClassification (nob-Latn)":45.45,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":35.39,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NoRecClassification (nob-Latn)":37.73,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":54.17,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification 
(nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":38.34,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":50.15,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":22,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":23,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":24,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":25,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":26,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":46.7,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":42.52,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":27,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NoRecClassification (nob-Latn)":50.32,"NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":41.57,"NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":28,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":29,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification 
(nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":30,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NoRecClassification (nob-Latn)":"","NordicLangClassification":"","NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":"","ScalaNbClassification":""} +{"Rank":31,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NoRecClassification (nob-Latn)":"","NordicLangClassification":74.25,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.79,"ScalaNbClassification":59.99} +{"Rank":32,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NoRecClassification (nob-Latn)":"","NordicLangClassification":79.39,"NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)":"","NorwegianParliament":56.75,"ScalaNbClassification":58.33} diff --git a/boards_data/zh/data_overall/default.jsonl b/boards_data/zh/data_overall/default.jsonl index b1aaf4ba8c7cbcabf8d3ccad522cc56f22387938..63aa49d15c99072d033dac043bb0e28ffaf51f97 100644 --- a/boards_data/zh/data_overall/default.jsonl +++ b/boards_data/zh/data_overall/default.jsonl @@ -1,178 +1,110 @@ -{"index":207,"Rank":1,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":72.43,"Classification Average (9 datasets)":74.67,"Clustering Average (4 datasets)":65.17,"PairClassification Average (2 datasets)":91.87,"Reranking Average (4 datasets)":72.58,"Retrieval Average (8 datasets)":76.5,"STS Average (8 datasets)":64.53} -{"index":17,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Embedding Dimensions":3584,"Max Tokens":32768,"Average (35 datasets)":72.05,"Classification Average (9 datasets)":75.09,"Clustering Average (4 datasets)":66.06,"PairClassification Average (2 datasets)":87.48,"Reranking Average (4 datasets)":68.92,"Retrieval Average (8 datasets)":76.03,"STS Average (8 datasets)":65.33} -{"index":234,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":72.05,"Classification Average (9 datasets)":75.09,"Clustering Average (4 datasets)":66.06,"PairClassification Average (2 datasets)":87.48,"Reranking Average (4 datasets)":68.92,"Retrieval Average (8 datasets)":76.03,"STS Average (8 datasets)":65.33} -{"index":142,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":72.05,"Classification Average (9 datasets)":75.09,"Clustering Average (4 
datasets)":66.06,"PairClassification Average (2 datasets)":87.48,"Reranking Average (4 datasets)":68.92,"Retrieval Average (8 datasets)":76.03,"STS Average (8 datasets)":65.33} -{"index":169,"Rank":5,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":71.88,"Classification Average (9 datasets)":74.43,"Clustering Average (4 datasets)":62.23,"PairClassification Average (2 datasets)":91.55,"Reranking Average (4 datasets)":72.34,"Retrieval Average (8 datasets)":76.36,"STS Average (8 datasets)":64.22} -{"index":50,"Rank":6,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":71.04,"Classification Average (9 datasets)":74.18,"Clustering Average (4 datasets)":66.35,"PairClassification Average (2 datasets)":90.87,"Reranking Average (4 datasets)":69.3,"Retrieval Average (8 datasets)":73.56,"STS Average (8 datasets)":63.23} -{"index":253,"Rank":7,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":70.95,"Classification Average (9 datasets)":74.59,"Clustering Average (4 datasets)":62.17,"PairClassification Average (2 datasets)":90.24,"Reranking Average (4 datasets)":70.0,"Retrieval Average (8 datasets)":74.36,"STS Average (8 datasets)":63.5} -{"index":276,"Rank":8,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":70.85,"Classification Average (9 datasets)":74.14,"Clustering Average (4 datasets)":66.64,"PairClassification Average (2 datasets)":87.08,"Reranking Average (4 datasets)":68.72,"Retrieval Average (8 datasets)":74.97,"STS Average (8 datasets)":62.15} -{"index":38,"Rank":9,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":70.78,"Classification Average (9 datasets)":74.3,"Clustering Average (4 datasets)":61.98,"PairClassification Average (2 datasets)":89.88,"Reranking Average (4 datasets)":69.78,"Retrieval Average (8 datasets)":74.41,"STS Average (8 datasets)":63.33} -{"index":15,"Rank":10,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":69.56,"Classification Average (9 datasets)":73.36,"Clustering Average (4 datasets)":67.08,"PairClassification Average (2 datasets)":88.52,"Reranking Average (4 datasets)":66.38,"Retrieval Average (8 datasets)":70.62,"STS Average (8 datasets)":62.32} -{"index":129,"Rank":11,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":69.07,"Classification Average (9 datasets)":72.75,"Clustering Average (4 datasets)":58.7,"PairClassification Average (2 datasets)":87.84,"Reranking Average (4 datasets)":67.98,"Retrieval Average (8 datasets)":72.93,"STS Average (8 datasets)":62.09} -{"index":12,"Rank":12,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1792,"Max Tokens":512,"Average (35 datasets)":68.71,"Classification Average (9 datasets)":71.74,"Clustering Average (4 datasets)":53.75,"PairClassification 
Average (2 datasets)":88.1,"Reranking Average (4 datasets)":68.27,"Retrieval Average (8 datasets)":74.41,"STS Average (8 datasets)":62.46} -{"index":155,"Rank":13,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":68.55,"Classification Average (9 datasets)":71.56,"Clustering Average (4 datasets)":54.32,"PairClassification Average (2 datasets)":88.08,"Reranking Average (4 datasets)":68.45,"Retrieval Average (8 datasets)":73.52,"STS Average (8 datasets)":62.48} -{"index":116,"Rank":14,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":68.55,"Classification Average (9 datasets)":71.56,"Clustering Average (4 datasets)":54.32,"PairClassification Average (2 datasets)":88.08,"Reranking Average (4 datasets)":68.45,"Retrieval Average (8 datasets)":73.52,"STS Average (8 datasets)":62.48} -{"index":154,"Rank":15,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":68.48,"Classification Average (9 datasets)":71.5,"Clustering Average (4 datasets)":53.9,"PairClassification Average (2 datasets)":88.1,"Reranking Average (4 datasets)":68.26,"Retrieval Average (8 datasets)":73.6,"STS Average (8 datasets)":62.46} -{"index":315,"Rank":16,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":68.34,"Classification Average (9 datasets)":72.84,"Clustering Average (4 datasets)":56.88,"PairClassification Average (2 datasets)":82.32,"Reranking Average (4 datasets)":69.67,"Retrieval Average (8 datasets)":73.12,"STS Average (8 datasets)":60.07} -{"index":173,"Rank":17,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":67.96,"Classification Average (9 datasets)":71.12,"Clustering Average (4 datasets)":53.3,"PairClassification Average (2 datasets)":87.93,"Reranking Average (4 datasets)":67.84,"Retrieval Average (8 datasets)":72.28,"STS Average (8 datasets)":62.49} -{"index":233,"Rank":18,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":67.65,"Classification Average (9 datasets)":71.12,"Clustering Average (4 datasets)":54.61,"PairClassification Average (2 datasets)":86.91,"Reranking Average (4 datasets)":68.21,"Retrieval Average (8 datasets)":71.86,"STS Average (8 datasets)":60.96} -{"index":16,"Rank":19,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":67.65,"Classification Average (9 datasets)":71.12,"Clustering Average (4 datasets)":54.61,"PairClassification Average (2 datasets)":86.91,"Reranking Average (4 datasets)":68.21,"Retrieval Average (8 datasets)":71.86,"STS Average (8 datasets)":60.96} -{"index":46,"Rank":20,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":67.51,"Classification Average (9 datasets)":70.0,"Clustering Average (4 
datasets)":50.96,"PairClassification Average (2 datasets)":88.92,"Reranking Average (4 datasets)":67.17,"Retrieval Average (8 datasets)":70.41,"STS Average (8 datasets)":64.89} -{"index":206,"Rank":21,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":67.28,"Classification Average (9 datasets)":71.2,"Clustering Average (4 datasets)":54.62,"PairClassification Average (2 datasets)":85.3,"Reranking Average (4 datasets)":67.34,"Retrieval Average (8 datasets)":73.41,"STS Average (8 datasets)":58.52} -{"index":105,"Rank":22,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":67.17,"Classification Average (9 datasets)":71.35,"Clustering Average (4 datasets)":54.0,"PairClassification Average (2 datasets)":84.34,"Reranking Average (4 datasets)":67.61,"Retrieval Average (8 datasets)":73.3,"STS Average (8 datasets)":58.41} -{"index":286,"Rank":23,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":66.72,"Classification Average (9 datasets)":71.34,"Clustering Average (4 datasets)":53.07,"PairClassification Average (2 datasets)":84.41,"Reranking Average (4 datasets)":67.4,"Retrieval Average (8 datasets)":72.49,"STS Average (8 datasets)":57.82} -{"index":47,"Rank":24,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":66.1,"Classification Average (9 datasets)":69.89,"Clustering Average (4 datasets)":50.8,"PairClassification Average (2 datasets)":87.57,"Reranking Average (4 datasets)":66.92,"Retrieval Average (8 datasets)":67.7,"STS Average (8 datasets)":62.13} -{"index":284,"Rank":25,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":65.92,"Classification Average (9 datasets)":71.26,"Clustering Average (4 datasets)":53.86,"PairClassification Average (2 datasets)":80.44,"Reranking Average (4 datasets)":67.0,"Retrieval Average (8 datasets)":71.71,"STS Average (8 datasets)":55.96} -{"index":21,"Rank":26,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":65.5,"Classification Average (9 datasets)":69.05,"Clustering Average (4 datasets)":49.04,"PairClassification Average (2 datasets)":82.68,"Reranking Average (4 datasets)":66.38,"Retrieval Average (8 datasets)":71.85,"STS Average (8 datasets)":58.66} -{"index":20,"Rank":27,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":65.14,"Classification Average (9 datasets)":69.05,"Clustering Average (4 datasets)":49.0,"PairClassification Average (2 datasets)":82.68,"Reranking Average (4 datasets)":66.39,"Retrieval Average (8 datasets)":70.26,"STS Average (8 datasets)":58.66} -{"index":175,"Rank":28,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":65.13,"Classification Average (9 datasets)":69.05,"Clustering Average (4 datasets)":49.16,"PairClassification Average (2 datasets)":82.68,"Reranking 
Average (4 datasets)":66.41,"Retrieval Average (8 datasets)":70.14,"STS Average (8 datasets)":58.66} -{"index":174,"Rank":29,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":64.54,"Classification Average (9 datasets)":67.62,"Clustering Average (4 datasets)":48.65,"PairClassification Average (2 datasets)":78.72,"Reranking Average (4 datasets)":65.98,"Retrieval Average (8 datasets)":71.02,"STS Average (8 datasets)":58.3} -{"index":27,"Rank":30,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":64.53,"Classification Average (9 datasets)":69.13,"Clustering Average (4 datasets)":48.99,"PairClassification Average (2 datasets)":81.6,"Reranking Average (4 datasets)":65.84,"Retrieval Average (8 datasets)":70.46,"STS Average (8 datasets)":56.25} -{"index":172,"Rank":31,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":64.36,"Classification Average (9 datasets)":68.29,"Clustering Average (4 datasets)":49.4,"PairClassification Average (2 datasets)":79.96,"Reranking Average (4 datasets)":66.1,"Retrieval Average (8 datasets)":70.08,"STS Average (8 datasets)":56.92} -{"index":51,"Rank":32,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":64.25,"Classification Average (9 datasets)":68.21,"Clustering Average (4 datasets)":49.48,"PairClassification Average (2 datasets)":80.01,"Reranking Average (4 datasets)":65.89,"Retrieval Average (8 datasets)":69.59,"STS Average (8 datasets)":57.08} -{"index":171,"Rank":33,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":64.16,"Classification Average (9 datasets)":67.77,"Clustering Average (4 datasets)":48.7,"PairClassification Average (2 datasets)":76.09,"Reranking Average (4 datasets)":66.95,"Retrieval Average (8 datasets)":71.07,"STS Average (8 datasets)":56.54} -{"index":252,"Rank":34,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":64.11,"Classification Average (9 datasets)":67.03,"Clustering Average (4 datasets)":47.04,"PairClassification Average (2 datasets)":78.38,"Reranking Average (4 datasets)":65.98,"Retrieval Average (8 datasets)":70.93,"STS Average (8 datasets)":58.02} -{"index":199,"Rank":35,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":63.79,"Classification Average (9 datasets)":64.94,"Clustering Average (4 datasets)":46.47,"PairClassification Average (2 datasets)":82.94,"Reranking Average (4 datasets)":66.57,"Retrieval Average (8 datasets)":69.4,"STS Average (8 datasets)":59.39} -{"index":280,"Rank":36,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":63.79,"Classification Average (9 datasets)":64.94,"Clustering Average (4 datasets)":46.47,"PairClassification Average (2 datasets)":82.94,"Reranking Average (4 datasets)":66.57,"Retrieval 
Average (8 datasets)":69.4,"STS Average (8 datasets)":59.39} -{"index":251,"Rank":37,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":63.66,"Classification Average (9 datasets)":66.98,"Clustering Average (4 datasets)":47.12,"PairClassification Average (2 datasets)":76.61,"Reranking Average (4 datasets)":66.68,"Retrieval Average (8 datasets)":71.2,"STS Average (8 datasets)":55.9} -{"index":26,"Rank":38,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":63.4,"Classification Average (9 datasets)":68.58,"Clustering Average (4 datasets)":50.01,"PairClassification Average (2 datasets)":76.77,"Reranking Average (4 datasets)":64.9,"Retrieval Average (8 datasets)":70.54,"STS Average (8 datasets)":53.0} -{"index":23,"Rank":39,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":63.13,"Classification Average (9 datasets)":68.07,"Clustering Average (4 datasets)":47.53,"PairClassification Average (2 datasets)":79.76,"Reranking Average (4 datasets)":65.4,"Retrieval Average (8 datasets)":69.49,"STS Average (8 datasets)":53.72} -{"index":41,"Rank":40,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":62.18,"Classification Average (9 datasets)":69.09,"Clustering Average (4 datasets)":48.88,"PairClassification Average (2 datasets)":82.38,"Reranking Average (4 datasets)":62.82,"Retrieval Average (8 datasets)":63.52,"STS Average (8 datasets)":54.35} -{"index":40,"Rank":41,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":62.09,"Classification Average (9 datasets)":67.89,"Clustering Average (4 datasets)":48.81,"PairClassification Average (2 datasets)":82.38,"Reranking Average (4 datasets)":63.2,"Retrieval Average (8 datasets)":64.11,"STS Average (8 datasets)":54.55} -{"index":274,"Rank":42,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":61.77,"Classification Average (9 datasets)":64.37,"Clustering Average (4 datasets)":48.05,"PairClassification Average (2 datasets)":68.76,"Reranking Average (4 datasets)":61.48,"Retrieval Average (8 datasets)":58.2,"STS Average (8 datasets)":67.66} -{"index":180,"Rank":43,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":60.81,"Classification Average (9 datasets)":70.17,"Clustering Average (4 datasets)":52.3,"PairClassification Average (2 datasets)":72.19,"Reranking Average (4 datasets)":61.86,"Retrieval Average (8 datasets)":61.75,"STS Average (8 datasets)":50.22} -{"index":288,"Rank":44,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":60.08,"Classification Average (9 datasets)":64.49,"Clustering Average (4 datasets)":48.95,"PairClassification Average (2 datasets)":70.0,"Reranking Average (4 datasets)":66.22,"Retrieval Average (8 datasets)":65.5,"STS Average (8 
datasets)":49.72} -{"index":39,"Rank":45,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":59.77,"Classification Average (9 datasets)":67.08,"Clustering Average (4 datasets)":47.21,"PairClassification Average (2 datasets)":78.77,"Reranking Average (4 datasets)":61.35,"Retrieval Average (8 datasets)":60.6,"STS Average (8 datasets)":51.45} -{"index":226,"Rank":46,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":59.03,"Classification Average (9 datasets)":68.83,"Clustering Average (4 datasets)":48.64,"PairClassification Average (2 datasets)":62.84,"Reranking Average (4 datasets)":60.96,"Retrieval Average (8 datasets)":59.76,"STS Average (8 datasets)":50.54} -{"index":184,"Rank":47,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (35 datasets)":58.81,"Classification Average (9 datasets)":67.34,"Clustering Average (4 datasets)":48.23,"PairClassification Average (2 datasets)":69.89,"Reranking Average (4 datasets)":56.0,"Retrieval Average (8 datasets)":63.66,"STS Average (8 datasets)":48.29} -{"index":33,"Rank":48,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":57.82,"Classification Average (9 datasets)":63.96,"Clustering Average (4 datasets)":44.18,"PairClassification Average (2 datasets)":70.4,"Reranking Average (4 datasets)":60.92,"Retrieval Average (8 datasets)":61.77,"STS Average (8 datasets)":49.1} -{"index":220,"Rank":49,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":57.79,"Classification Average (9 datasets)":67.52,"Clustering Average (4 datasets)":47.68,"PairClassification Average (2 datasets)":63.99,"Reranking Average (4 datasets)":59.34,"Retrieval Average (8 datasets)":56.91,"STS Average (8 datasets)":50.47} -{"index":221,"Rank":50,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":57.66,"Classification Average (9 datasets)":68.2,"Clustering Average (4 datasets)":48.88,"PairClassification Average (2 datasets)":64.3,"Reranking Average (4 datasets)":59.66,"Retrieval Average (8 datasets)":54.75,"STS Average (8 datasets)":50.42} -{"index":183,"Rank":51,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":56.21,"Classification Average (9 datasets)":65.35,"Clustering Average (4 datasets)":40.68,"PairClassification Average (2 datasets)":67.07,"Reranking Average (4 datasets)":54.35,"Retrieval Average (8 datasets)":61.63,"STS Average (8 datasets)":46.49} -{"index":186,"Rank":52,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":56.11,"Classification Average (9 datasets)":65.85,"Clustering Average (4 datasets)":45.26,"PairClassification Average (2 datasets)":66.45,"Reranking Average (4 datasets)":53.86,"Retrieval Average (8 datasets)":59.95,"STS Average (8 datasets)":45.27} 
-{"index":188,"Rank":53,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (35 datasets)":56.03,"Classification Average (9 datasets)":65.31,"Clustering Average (4 datasets)":42.42,"PairClassification Average (2 datasets)":74.96,"Reranking Average (4 datasets)":56.27,"Retrieval Average (8 datasets)":57.96,"STS Average (8 datasets)":45.6} -{"index":311,"Rank":54,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (35 datasets)":53.73,"Classification Average (9 datasets)":64.31,"Clustering Average (4 datasets)":45.68,"PairClassification Average (2 datasets)":69.56,"Reranking Average (4 datasets)":54.28,"Retrieval Average (8 datasets)":52.0,"STS Average (8 datasets)":43.35} -{"index":190,"Rank":55,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (35 datasets)":51.55,"Classification Average (9 datasets)":62.85,"Clustering Average (4 datasets)":42.42,"PairClassification Average (2 datasets)":72.38,"Reranking Average (4 datasets)":53.62,"Retrieval Average (8 datasets)":46.16,"STS Average (8 datasets)":42.57} -{"index":279,"Rank":56,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":50.12,"Classification Average (9 datasets)":61.0,"Clustering Average (4 datasets)":44.39,"PairClassification Average (2 datasets)":66.62,"Reranking Average (4 datasets)":49.25,"Retrieval Average (8 datasets)":44.4,"STS Average (8 datasets)":42.78} -{"index":277,"Rank":57,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":48.59,"Classification Average (9 datasets)":62.19,"Clustering Average (4 datasets)":37.66,"PairClassification Average (2 datasets)":67.41,"Reranking Average (4 datasets)":49.45,"Retrieval Average (8 datasets)":38.79,"STS Average (8 datasets)":43.41} -{"index":53,"Rank":58,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":48.56,"Classification Average (9 datasets)":60.66,"Clustering Average (4 datasets)":30.02,"PairClassification Average (2 datasets)":70.86,"Reranking Average (4 datasets)":49.16,"Retrieval Average (8 datasets)":41.94,"STS Average (8 datasets)":44.97} -{"index":48,"Rank":59,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":46.83,"Classification Average (9 datasets)":62.54,"Clustering Average (4 datasets)":31.8,"PairClassification Average (2 datasets)":81.46,"Reranking Average (4 datasets)":45.08,"Retrieval Average (8 datasets)":32.1,"STS Average (8 datasets)":43.62} -{"index":49,"Rank":60,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":44.59,"Classification Average (9 datasets)":62.7,"Clustering Average (4 datasets)":39.67,"PairClassification Average (2 datasets)":80.9,"Reranking Average (4 datasets)":44.91,"Retrieval Average (8 datasets)":22.92,"STS Average 
(8 datasets)":39.11} -{"index":159,"Rank":61,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":35.89,"Classification Average (9 datasets)":57.87,"Clustering Average (4 datasets)":34.44,"PairClassification Average (2 datasets)":58.52,"Reranking Average (4 datasets)":32.84,"Retrieval Average (8 datasets)":7.15,"STS Average (8 datasets)":36.48} -{"index":289,"Rank":62,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":23.88,"Classification Average (9 datasets)":38.04,"Clustering Average (4 datasets)":18.34,"PairClassification Average (2 datasets)":56.7,"Reranking Average (4 datasets)":25.86,"Retrieval Average (8 datasets)":4.52,"STS Average (8 datasets)":20.86} -{"index":0,"Rank":63,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":256,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":1,"Rank":64,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":768,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":2,"Rank":65,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":4,"Rank":67,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":1024,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":5,"Rank":68,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":16000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":6,"Rank":69,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":7,"Rank":70,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage 
(GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":8,"Rank":71,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":4000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":9,"Rank":72,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Embedding Dimensions":1024,"Max Tokens":4000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":10,"Rank":73,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":11,"Rank":74,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":22,"Rank":79,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":25,"Rank":81,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":28,"Rank":82,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":29,"Rank":83,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":73.93,"Clustering Average (4 
datasets)":59.3,"PairClassification Average (2 datasets)":86.67,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":73.73,"STS Average (8 datasets)":56.87} -{"index":30,"Rank":84,"Model":"bge-reranker-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.03,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":31,"Rank":85,"Model":"bge-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.03,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":32,"Rank":86,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":43,"Rank":92,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":44,"Rank":93,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":45,"Rank":94,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":54,"Rank":96,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":55,"Rank":97,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 
datasets)":"","STS Average (8 datasets)":""} -{"index":56,"Rank":98,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":57,"Rank":99,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":72,"Rank":114,"Model":"Zhihui_LLM_Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":76.74,"STS Average (8 datasets)":""} -{"index":73,"Rank":115,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":75,"Rank":117,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":76,"Rank":118,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":77,"Rank":119,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":78,"Rank":120,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} 
-{"index":79,"Rank":121,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":80,"Rank":122,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":81,"Rank":123,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":82,"Rank":124,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":96,"Rank":138,"Model":"qwen-1.8b-retrieval-test<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":70.63,"STS Average (8 datasets)":""} -{"index":106,"Rank":147,"Model":"alime-reranker-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.54,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":109,"Rank":150,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":120,"Rank":160,"Model":"PEG<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":68.41,"Retrieval Average (8 datasets)":73.78,"STS Average (8 datasets)":""} -{"index":122,"Rank":162,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million 
Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":125,"Rank":165,"Model":"cloudy-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":66.04,"Retrieval Average (8 datasets)":72.92,"STS Average (8 datasets)":""} -{"index":137,"Rank":176,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":140,"Rank":179,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":144,"Rank":182,"Model":"bge-reranker-large-onnx<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.03,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":145,"Rank":183,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Embedding Dimensions":768,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":146,"Rank":184,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":147,"Rank":185,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":150,"Rank":188,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification 
Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":158,"Rank":194,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":160,"Rank":195,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":161,"Rank":196,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":162,"Rank":197,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":163,"Rank":198,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":164,"Rank":199,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":165,"Rank":200,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":166,"Rank":201,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 
datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":167,"Rank":202,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":176,"Rank":205,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":177,"Rank":206,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":178,"Rank":207,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":179,"Rank":208,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":181,"Rank":209,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":189,"Rank":213,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":43.24,"PairClassification Average (2 datasets)":76.59,"Reranking Average (4 datasets)":57.68,"Retrieval Average (8 datasets)":59.77,"STS Average (8 datasets)":""} -{"index":191,"Rank":214,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":42.32,"PairClassification Average (2 datasets)":78.65,"Reranking Average (4 datasets)":59.11,"Retrieval Average (8 datasets)":62.87,"STS Average (8 datasets)":""} -{"index":197,"Rank":220,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory 
Usage (GB, fp32)":1.02,"Embedding Dimensions":768,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":208,"Rank":228,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":70.83,"Clustering Average (4 datasets)":48.35,"PairClassification Average (2 datasets)":74.64,"Reranking Average (4 datasets)":63.73,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":223,"Rank":241,"Model":"LdIR-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":68.58,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":224,"Rank":242,"Model":"bge-reranker-base-1k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.02,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":225,"Rank":243,"Model":"bge-reranker-large-1k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.87,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":235,"Rank":250,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Embedding Dimensions":768,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":238,"Rank":253,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":128,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":239,"Rank":254,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":256,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":240,"Rank":255,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":512,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 
datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":241,"Rank":256,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":64,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":243,"Rank":258,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":244,"Rank":259,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":245,"Rank":260,"Model":"checkpoint-9000<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":70.63,"STS Average (8 datasets)":""} -{"index":246,"Rank":261,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":247,"Rank":262,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":248,"Rank":263,"Model":"360Zhinao-1.8B-Reranking<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":70.13,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":249,"Rank":264,"Model":"360Zhinao-search<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 
datasets)":68.92,"Retrieval Average (8 datasets)":75.06,"STS Average (8 datasets)":""} -{"index":254,"Rank":266,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":255,"Rank":267,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":256,"Rank":268,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":257,"Rank":269,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":258,"Rank":270,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":259,"Rank":271,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":260,"Rank":272,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":261,"Rank":273,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":262,"Rank":274,"Model":"gtr-t5-base<\/a>","Model Size (Million 
Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":263,"Rank":275,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":264,"Rank":276,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":265,"Rank":277,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":266,"Rank":278,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":267,"Rank":279,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":268,"Rank":280,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":269,"Rank":281,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":270,"Rank":282,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 
datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":271,"Rank":283,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":272,"Rank":284,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":273,"Rank":285,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":282,"Rank":289,"Model":"BAAI-bge-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":67.78,"Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":293,"Rank":296,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":294,"Rank":297,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":298,"Rank":301,"Model":"hktv-fine-tuned-cloudy-large-zh-metaphor14<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":66.04,"Retrieval Average (8 datasets)":72.92,"STS Average (8 datasets)":""} -{"index":299,"Rank":302,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 
datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":300,"Rank":303,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":306,"Rank":309,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":57.91} -{"index":307,"Rank":310,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":308,"Rank":311,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":2048,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":309,"Rank":312,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":4096,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":310,"Rank":313,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":12288,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":312,"Rank":314,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":256,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":313,"Rank":315,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":3072,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 
datasets)":"","STS Average (8 datasets)":""} -{"index":314,"Rank":316,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} -{"index":316,"Rank":317,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":30522,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (35 datasets)":58.87,"Classification Average (9 datasets)":69.83,"Clustering Average (4 datasets)":48.23,"PairClassification Average (2 datasets)":69.89,"Reranking Average (4 datasets)":53.24,"Retrieval Average (8 datasets)":63.66,"STS Average (8 datasets)":47.05} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":56.45,"Classification Average (9 datasets)":67.71,"Clustering Average (4 datasets)":40.68,"PairClassification Average (2 datasets)":67.07,"Reranking Average (4 datasets)":52.13,"Retrieval Average (8 datasets)":61.61,"STS Average (8 datasets)":44.9} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":55.88,"Classification Average (9 datasets)":68.08,"Clustering Average (4 datasets)":45.26,"PairClassification Average (2 datasets)":66.45,"Reranking Average (4 datasets)":51.85,"Retrieval Average (8 datasets)":59.94,"STS Average (8 datasets)":43.1} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":256,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Embedding Dimensions":768,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":1024,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":16000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":4000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Embedding Dimensions":1024,"Max Tokens":4000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1792,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":53.75,"PairClassification Average (2 datasets)":88.1,"Reranking 
Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":67.08,"PairClassification Average (2 datasets)":88.52,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":47.53,"PairClassification Average (2 datasets)":79.76,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":50.01,"PairClassification Average (2 datasets)":76.77,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":48.99,"PairClassification Average (2 datasets)":81.6,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":44.18,"PairClassification Average (2 datasets)":70.4,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million 
Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":30.02,"PairClassification Average (2 datasets)":70.86,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification 
Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Embedding Dimensions":4096,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Embedding Dimensions":4096,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":4096,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Embedding Dimensions":2048,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} 
+{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 
datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Embedding Dimensions":1024,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory 
Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Embedding Dimensions":768,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":47.68,"PairClassification Average (2 datasets)":63.99,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":48.88,"PairClassification Average (2 datasets)":64.3,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Embedding Dimensions":768,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":128,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 
datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":256,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":512,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":64,"Max Tokens":8192,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max 
Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} 
+{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 
datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":37.66,"PairClassification Average (2 datasets)":67.41,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Embedding Dimensions":768,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":44.39,"PairClassification Average (2 datasets)":66.62,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} 
+{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":2048,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":4096,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":12288,"Max Tokens":2046,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":45.68,"PairClassification Average (2 datasets)":69.56,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":256,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":3072,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average (35 
datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":56.88,"PairClassification Average (2 datasets)":82.32,"Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":30522,"Max Tokens":512,"Average (35 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (4 datasets)":"","PairClassification Average (2 datasets)":"","Reranking Average (4 datasets)":"","Retrieval Average (8 datasets)":"","STS Average (8 datasets)":""} diff --git a/boards_data/zh/data_tasks/Classification/default.jsonl b/boards_data/zh/data_tasks/Classification/default.jsonl index 83779f32c24690ac8b8446cbcc81301f4583dd39..0c47d2212c9f31ddfbd2d9f9cc0525de5196f5d7 100644 --- a/boards_data/zh/data_tasks/Classification/default.jsonl +++ b/boards_data/zh/data_tasks/Classification/default.jsonl @@ -1,88 +1,112 @@ -{"level_0":0,"index":17,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":75.09,"AmazonReviewsClassification (zh)":53.98,"IFlyTek":54.52,"JDReview":86.51,"MassiveIntentClassification (zh-CN)":81.09,"MassiveScenarioClassification (zh-CN)":86.06,"MultilingualSentiment":76.88,"OnlineShopping":94.3,"TNews":52.97,"Waimai":89.47} -{"level_0":1,"index":234,"Rank":2,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.09,"AmazonReviewsClassification (zh)":53.98,"IFlyTek":54.52,"JDReview":86.51,"MassiveIntentClassification (zh-CN)":81.09,"MassiveScenarioClassification (zh-CN)":86.06,"MultilingualSentiment":76.88,"OnlineShopping":94.3,"TNews":52.97,"Waimai":89.47} -{"level_0":2,"index":142,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.09,"AmazonReviewsClassification (zh)":53.98,"IFlyTek":54.52,"JDReview":86.51,"MassiveIntentClassification (zh-CN)":81.09,"MassiveScenarioClassification (zh-CN)":86.06,"MultilingualSentiment":76.88,"OnlineShopping":94.3,"TNews":52.97,"Waimai":89.47} -{"level_0":3,"index":207,"Rank":4,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.67,"AmazonReviewsClassification (zh)":50.07,"IFlyTek":51.76,"JDReview":89.08,"MassiveIntentClassification (zh-CN)":77.45,"MassiveScenarioClassification (zh-CN)":85.3,"MultilingualSentiment":79.45,"OnlineShopping":94.9,"TNews":54.64,"Waimai":89.34} -{"level_0":4,"index":253,"Rank":5,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.59,"AmazonReviewsClassification (zh)":49.44,"IFlyTek":52.1,"JDReview":88.57,"MassiveIntentClassification (zh-CN)":77.71,"MassiveScenarioClassification (zh-CN)":85.63,"MultilingualSentiment":79.09,"OnlineShopping":94.62,"TNews":54.52,"Waimai":89.59} -{"level_0":5,"index":169,"Rank":6,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.43,"AmazonReviewsClassification (zh)":49.72,"IFlyTek":51.7,"JDReview":88.87,"MassiveIntentClassification (zh-CN)":76.79,"MassiveScenarioClassification (zh-CN)":84.96,"MultilingualSentiment":79.22,"OnlineShopping":94.88,"TNews":54.35,"Waimai":89.36} -{"level_0":6,"index":38,"Rank":7,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":74.3,"AmazonReviewsClassification (zh)":49.5,"IFlyTek":51.77,"JDReview":88.48,"MassiveIntentClassification (zh-CN)":76.96,"MassiveScenarioClassification (zh-CN)":84.64,"MultilingualSentiment":78.97,"OnlineShopping":94.56,"TNews":54.39,"Waimai":89.42} -{"level_0":7,"index":50,"Rank":8,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.18,"AmazonReviewsClassification (zh)":49.67,"IFlyTek":52.64,"JDReview":87.6,"MassiveIntentClassification (zh-CN)":77.43,"MassiveScenarioClassification (zh-CN)":83.87,"MultilingualSentiment":78.48,"OnlineShopping":94.34,"TNews":54.37,"Waimai":89.23} -{"level_0":8,"index":276,"Rank":9,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.14,"AmazonReviewsClassification (zh)":49.68,"IFlyTek":51.77,"JDReview":86.94,"MassiveIntentClassification (zh-CN)":80.6,"MassiveScenarioClassification (zh-CN)":87.42,"MultilingualSentiment":75.92,"OnlineShopping":94.03,"TNews":52.69,"Waimai":88.23} -{"level_0":9,"index":29,"Rank":10,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.93,"AmazonReviewsClassification (zh)":53.0,"IFlyTek":49.94,"JDReview":88.91,"MassiveIntentClassification (zh-CN)":78.96,"MassiveScenarioClassification (zh-CN)":81.54,"MultilingualSentiment":78.91,"OnlineShopping":94.59,"TNews":50.26,"Waimai":89.26} -{"level_0":10,"index":15,"Rank":11,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":73.36,"AmazonReviewsClassification (zh)":52.95,"IFlyTek":53.77,"JDReview":88.2,"MassiveIntentClassification (zh-CN)":76.25,"MassiveScenarioClassification (zh-CN)":77.26,"MultilingualSentiment":77.42,"OnlineShopping":94.48,"TNews":51.24,"Waimai":88.63} -{"level_0":11,"index":315,"Rank":12,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.84,"AmazonReviewsClassification (zh)":48.3,"IFlyTek":50.75,"JDReview":87.69,"MassiveIntentClassification (zh-CN)":74.91,"MassiveScenarioClassification (zh-CN)":81.28,"MultilingualSentiment":76.83,"OnlineShopping":94.42,"TNews":52.62,"Waimai":88.77} -{"level_0":12,"index":129,"Rank":13,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.75,"AmazonReviewsClassification (zh)":48.54,"IFlyTek":51.77,"JDReview":86.7,"MassiveIntentClassification (zh-CN)":75.87,"MassiveScenarioClassification (zh-CN)":78.44,"MultilingualSentiment":77.53,"OnlineShopping":93.71,"TNews":53.69,"Waimai":88.53} -{"level_0":13,"index":12,"Rank":14,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.74,"AmazonReviewsClassification (zh)":46.18,"IFlyTek":51.8,"JDReview":86.02,"MassiveIntentClassification (zh-CN)":73.85,"MassiveScenarioClassification (zh-CN)":77.13,"MultilingualSentiment":76.35,"OnlineShopping":93.2,"TNews":53.06,"Waimai":88.1} -{"level_0":14,"index":116,"Rank":15,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.56,"AmazonReviewsClassification (zh)":46.59,"IFlyTek":50.74,"JDReview":86.1,"MassiveIntentClassification (zh-CN)":73.28,"MassiveScenarioClassification (zh-CN)":76.2,"MultilingualSentiment":76.64,"OnlineShopping":93.39,"TNews":53.1,"Waimai":88.01} 
-{"level_0":15,"index":155,"Rank":16,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.56,"AmazonReviewsClassification (zh)":46.59,"IFlyTek":50.74,"JDReview":86.1,"MassiveIntentClassification (zh-CN)":73.28,"MassiveScenarioClassification (zh-CN)":76.2,"MultilingualSentiment":76.64,"OnlineShopping":93.39,"TNews":53.1,"Waimai":88.01} -{"level_0":16,"index":154,"Rank":17,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.5,"AmazonReviewsClassification (zh)":46.32,"IFlyTek":50.63,"JDReview":86.12,"MassiveIntentClassification (zh-CN)":73.28,"MassiveScenarioClassification (zh-CN)":76.3,"MultilingualSentiment":76.45,"OnlineShopping":93.24,"TNews":53.11,"Waimai":88.08} -{"level_0":17,"index":105,"Rank":18,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.35,"AmazonReviewsClassification (zh)":46.95,"IFlyTek":49.67,"JDReview":86.53,"MassiveIntentClassification (zh-CN)":73.39,"MassiveScenarioClassification (zh-CN)":76.04,"MultilingualSentiment":76.4,"OnlineShopping":92.6,"TNews":52.04,"Waimai":88.49} -{"level_0":18,"index":286,"Rank":19,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.34,"AmazonReviewsClassification (zh)":47.23,"IFlyTek":49.6,"JDReview":86.72,"MassiveIntentClassification (zh-CN)":73.31,"MassiveScenarioClassification (zh-CN)":75.71,"MultilingualSentiment":76.48,"OnlineShopping":92.68,"TNews":51.98,"Waimai":88.37} -{"level_0":19,"index":284,"Rank":20,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.26,"AmazonReviewsClassification (zh)":45.82,"IFlyTek":48.62,"JDReview":85.95,"MassiveIntentClassification (zh-CN)":73.32,"MassiveScenarioClassification (zh-CN)":76.79,"MultilingualSentiment":75.79,"OnlineShopping":93.36,"TNews":53.65,"Waimai":88.07} -{"level_0":20,"index":206,"Rank":21,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.2,"AmazonReviewsClassification (zh)":46.72,"IFlyTek":49.74,"JDReview":86.74,"MassiveIntentClassification (zh-CN)":72.87,"MassiveScenarioClassification (zh-CN)":75.47,"MultilingualSentiment":76.28,"OnlineShopping":92.49,"TNews":52.16,"Waimai":88.36} -{"level_0":21,"index":173,"Rank":22,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.12,"AmazonReviewsClassification (zh)":46.34,"IFlyTek":49.93,"JDReview":86.27,"MassiveIntentClassification (zh-CN)":72.05,"MassiveScenarioClassification (zh-CN)":75.3,"MultilingualSentiment":76.22,"OnlineShopping":93.53,"TNews":53.2,"Waimai":87.27} -{"level_0":22,"index":16,"Rank":23,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.12,"AmazonReviewsClassification (zh)":47.21,"IFlyTek":44.85,"JDReview":85.82,"MassiveIntentClassification (zh-CN)":76.88,"MassiveScenarioClassification (zh-CN)":80.76,"MultilingualSentiment":74.46,"OnlineShopping":93.5,"TNews":49.95,"Waimai":86.63} -{"level_0":23,"index":233,"Rank":24,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.12,"AmazonReviewsClassification (zh)":47.21,"IFlyTek":44.85,"JDReview":85.82,"MassiveIntentClassification (zh-CN)":76.88,"MassiveScenarioClassification 
(zh-CN)":80.76,"MultilingualSentiment":74.46,"OnlineShopping":93.5,"TNews":49.95,"Waimai":86.63} -{"level_0":24,"index":208,"Rank":25,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.83,"AmazonReviewsClassification (zh)":46.67,"IFlyTek":47.34,"JDReview":85.67,"MassiveIntentClassification (zh-CN)":72.38,"MassiveScenarioClassification (zh-CN)":74.87,"MultilingualSentiment":76.27,"OnlineShopping":93.05,"TNews":53.27,"Waimai":87.96} -{"level_0":25,"index":180,"Rank":26,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":70.17,"AmazonReviewsClassification (zh)":46.24,"IFlyTek":45.05,"JDReview":85.82,"MassiveIntentClassification (zh-CN)":74.99,"MassiveScenarioClassification (zh-CN)":79.93,"MultilingualSentiment":73.31,"OnlineShopping":92.19,"TNews":47.05,"Waimai":86.94} -{"level_0":26,"index":46,"Rank":27,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.0,"AmazonReviewsClassification (zh)":44.93,"IFlyTek":48.3,"JDReview":85.07,"MassiveIntentClassification (zh-CN)":71.16,"MassiveScenarioClassification (zh-CN)":73.54,"MultilingualSentiment":75.16,"OnlineShopping":93.25,"TNews":52.41,"Waimai":86.21} -{"level_0":27,"index":47,"Rank":28,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.89,"AmazonReviewsClassification (zh)":44.88,"IFlyTek":49.11,"JDReview":85.57,"MassiveIntentClassification (zh-CN)":70.78,"MassiveScenarioClassification (zh-CN)":73.16,"MultilingualSentiment":74.39,"OnlineShopping":93.12,"TNews":51.8,"Waimai":86.18} -{"level_0":28,"index":27,"Rank":29,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":69.13,"AmazonReviewsClassification (zh)":41.38,"IFlyTek":48.74,"JDReview":85.14,"MassiveIntentClassification (zh-CN)":68.84,"MassiveScenarioClassification (zh-CN)":74.7,"MultilingualSentiment":72.97,"OnlineShopping":91.43,"TNews":52.1,"Waimai":86.9} -{"level_0":29,"index":41,"Rank":30,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.09,"AmazonReviewsClassification (zh)":43.32,"IFlyTek":47.08,"JDReview":84.48,"MassiveIntentClassification (zh-CN)":70.91,"MassiveScenarioClassification (zh-CN)":74.94,"MultilingualSentiment":72.68,"OnlineShopping":92.11,"TNews":49.85,"Waimai":86.44} -{"level_0":30,"index":20,"Rank":31,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.05,"AmazonReviewsClassification (zh)":40.81,"IFlyTek":48.01,"JDReview":87.02,"MassiveIntentClassification (zh-CN)":68.27,"MassiveScenarioClassification (zh-CN)":73.13,"MultilingualSentiment":73.4,"OnlineShopping":91.82,"TNews":51.93,"Waimai":87.1} -{"level_0":31,"index":175,"Rank":32,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.05,"AmazonReviewsClassification (zh)":40.82,"IFlyTek":47.99,"JDReview":86.98,"MassiveIntentClassification (zh-CN)":68.26,"MassiveScenarioClassification (zh-CN)":73.12,"MultilingualSentiment":73.41,"OnlineShopping":91.81,"TNews":51.93,"Waimai":87.12} -{"level_0":32,"index":21,"Rank":33,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.05,"AmazonReviewsClassification (zh)":40.8,"IFlyTek":47.99,"JDReview":87.02,"MassiveIntentClassification 
(zh-CN)":68.26,"MassiveScenarioClassification (zh-CN)":73.13,"MultilingualSentiment":73.39,"OnlineShopping":91.81,"TNews":51.93,"Waimai":87.1} -{"level_0":33,"index":226,"Rank":34,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.83,"AmazonReviewsClassification (zh)":47.79,"IFlyTek":44.86,"JDReview":88.48,"MassiveIntentClassification (zh-CN)":61.29,"MassiveScenarioClassification (zh-CN)":66.9,"MultilingualSentiment":78.46,"OnlineShopping":92.58,"TNews":50.02,"Waimai":89.09} -{"level_0":34,"index":26,"Rank":35,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":68.58,"AmazonReviewsClassification (zh)":41.94,"IFlyTek":45.32,"JDReview":85.38,"MassiveIntentClassification (zh-CN)":66.96,"MassiveScenarioClassification (zh-CN)":73.39,"MultilingualSentiment":73.7,"OnlineShopping":91.66,"TNews":52.05,"Waimai":86.83} -{"level_0":35,"index":172,"Rank":36,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.29,"AmazonReviewsClassification (zh)":39.64,"IFlyTek":47.9,"JDReview":84.78,"MassiveIntentClassification (zh-CN)":68.09,"MassiveScenarioClassification (zh-CN)":73.22,"MultilingualSentiment":71.67,"OnlineShopping":91.38,"TNews":51.25,"Waimai":86.68} -{"level_0":36,"index":51,"Rank":37,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.21,"AmazonReviewsClassification (zh)":39.32,"IFlyTek":47.96,"JDReview":84.9,"MassiveIntentClassification (zh-CN)":67.65,"MassiveScenarioClassification (zh-CN)":72.97,"MultilingualSentiment":71.54,"OnlineShopping":91.3,"TNews":51.56,"Waimai":86.66} -{"level_0":37,"index":221,"Rank":38,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":68.2,"AmazonReviewsClassification (zh)":44.44,"IFlyTek":43.96,"JDReview":86.92,"MassiveIntentClassification (zh-CN)":67.23,"MassiveScenarioClassification (zh-CN)":74.88,"MultilingualSentiment":72.47,"OnlineShopping":89.59,"TNews":48.26,"Waimai":86.08} -{"level_0":38,"index":23,"Rank":39,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":68.07,"AmazonReviewsClassification (zh)":40.15,"IFlyTek":48.62,"JDReview":83.62,"MassiveIntentClassification (zh-CN)":67.93,"MassiveScenarioClassification (zh-CN)":73.98,"MultilingualSentiment":70.67,"OnlineShopping":91.26,"TNews":51.08,"Waimai":85.36} -{"level_0":39,"index":40,"Rank":40,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.89,"AmazonReviewsClassification (zh)":42.04,"IFlyTek":46.3,"JDReview":77.3,"MassiveIntentClassification (zh-CN)":70.82,"MassiveScenarioClassification (zh-CN)":75.21,"MultilingualSentiment":71.67,"OnlineShopping":91.53,"TNews":50.17,"Waimai":85.98} -{"level_0":40,"index":171,"Rank":41,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.77,"AmazonReviewsClassification (zh)":40.25,"IFlyTek":47.46,"JDReview":84.99,"MassiveIntentClassification (zh-CN)":65.56,"MassiveScenarioClassification (zh-CN)":71.49,"MultilingualSentiment":72.48,"OnlineShopping":90.34,"TNews":50.78,"Waimai":86.62} -{"level_0":41,"index":174,"Rank":42,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.62,"AmazonReviewsClassification 
(zh)":40.57,"IFlyTek":47.03,"JDReview":86.74,"MassiveIntentClassification (zh-CN)":64.02,"MassiveScenarioClassification (zh-CN)":68.95,"MultilingualSentiment":73.03,"OnlineShopping":90.75,"TNews":50.69,"Waimai":86.77} -{"level_0":42,"index":220,"Rank":43,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":67.52,"AmazonReviewsClassification (zh)":43.02,"IFlyTek":44.42,"JDReview":85.33,"MassiveIntentClassification (zh-CN)":68.4,"MassiveScenarioClassification (zh-CN)":74.6,"MultilingualSentiment":71.9,"OnlineShopping":87.77,"TNews":48.28,"Waimai":83.99} -{"level_0":43,"index":184,"Rank":44,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":67.34,"AmazonReviewsClassification (zh)":38.83,"IFlyTek":45.47,"JDReview":80.99,"MassiveIntentClassification (zh-CN)":71.12,"MassiveScenarioClassification (zh-CN)":76.83,"MultilingualSentiment":68.58,"OnlineShopping":90.81,"TNews":48.38,"Waimai":85.02} -{"level_0":44,"index":39,"Rank":45,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.08,"AmazonReviewsClassification (zh)":39.68,"IFlyTek":47.0,"JDReview":77.34,"MassiveIntentClassification (zh-CN)":70.98,"MassiveScenarioClassification (zh-CN)":75.69,"MultilingualSentiment":69.17,"OnlineShopping":89.79,"TNews":49.77,"Waimai":84.34} -{"level_0":45,"index":252,"Rank":46,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"AmazonReviewsClassification (zh)":40.33,"IFlyTek":44.25,"JDReview":86.1,"MassiveIntentClassification (zh-CN)":68.0,"MassiveScenarioClassification (zh-CN)":72.08,"MultilingualSentiment":70.15,"OnlineShopping":90.27,"TNews":46.54,"Waimai":85.54} -{"level_0":46,"index":251,"Rank":47,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.98,"AmazonReviewsClassification (zh)":40.24,"IFlyTek":44.35,"JDReview":84.26,"MassiveIntentClassification (zh-CN)":68.97,"MassiveScenarioClassification (zh-CN)":73.32,"MultilingualSentiment":70.25,"OnlineShopping":89.93,"TNews":46.81,"Waimai":84.7} -{"level_0":47,"index":186,"Rank":48,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.85,"AmazonReviewsClassification (zh)":37.5,"IFlyTek":47.35,"JDReview":79.34,"MassiveIntentClassification (zh-CN)":68.24,"MassiveScenarioClassification (zh-CN)":74.47,"MultilingualSentiment":64.74,"OnlineShopping":88.73,"TNews":48.38,"Waimai":83.9} -{"level_0":48,"index":183,"Rank":49,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":65.35,"AmazonReviewsClassification (zh)":37.23,"IFlyTek":44.93,"JDReview":76.21,"MassiveIntentClassification (zh-CN)":69.16,"MassiveScenarioClassification (zh-CN)":75.42,"MultilingualSentiment":65.28,"OnlineShopping":88.4,"TNews":47.06,"Waimai":84.42} -{"level_0":49,"index":188,"Rank":50,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.31,"AmazonReviewsClassification (zh)":38.12,"IFlyTek":43.52,"JDReview":81.09,"MassiveIntentClassification (zh-CN)":66.91,"MassiveScenarioClassification (zh-CN)":74.72,"MultilingualSentiment":66.33,"OnlineShopping":88.35,"TNews":46.08,"Waimai":82.67} -{"level_0":50,"index":280,"Rank":51,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":64.94,"AmazonReviewsClassification (zh)":34.94,"IFlyTek":47.36,"JDReview":79.57,"MassiveIntentClassification (zh-CN)":68.2,"MassiveScenarioClassification (zh-CN)":71.93,"MultilingualSentiment":63.29,"OnlineShopping":87.0,"TNews":47.65,"Waimai":84.54} -{"level_0":51,"index":199,"Rank":52,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.94,"AmazonReviewsClassification (zh)":34.94,"IFlyTek":47.36,"JDReview":79.57,"MassiveIntentClassification (zh-CN)":68.2,"MassiveScenarioClassification (zh-CN)":71.93,"MultilingualSentiment":63.29,"OnlineShopping":87.0,"TNews":47.65,"Waimai":84.54} -{"level_0":52,"index":288,"Rank":53,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.49,"AmazonReviewsClassification (zh)":38.69,"IFlyTek":41.15,"JDReview":82.83,"MassiveIntentClassification (zh-CN)":59.28,"MassiveScenarioClassification (zh-CN)":66.48,"MultilingualSentiment":68.2,"OnlineShopping":89.13,"TNews":49.65,"Waimai":84.96} -{"level_0":53,"index":274,"Rank":54,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.37,"AmazonReviewsClassification (zh)":29.75,"IFlyTek":50.93,"JDReview":84.93,"MassiveIntentClassification (zh-CN)":67.46,"MassiveScenarioClassification (zh-CN)":71.6,"MultilingualSentiment":55.9,"OnlineShopping":82.53,"TNews":55.08,"Waimai":81.16} -{"level_0":54,"index":311,"Rank":55,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.31,"AmazonReviewsClassification (zh)":38.3,"IFlyTek":44.62,"JDReview":74.6,"MassiveIntentClassification (zh-CN)":64.81,"MassiveScenarioClassification (zh-CN)":71.4,"MultilingualSentiment":67.99,"OnlineShopping":88.94,"TNews":45.77,"Waimai":82.37} -{"level_0":55,"index":33,"Rank":56,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":63.96,"AmazonReviewsClassification (zh)":35.91,"IFlyTek":45.49,"JDReview":80.04,"MassiveIntentClassification (zh-CN)":63.95,"MassiveScenarioClassification (zh-CN)":70.8,"MultilingualSentiment":63.06,"OnlineShopping":85.05,"TNews":48.15,"Waimai":83.18} -{"level_0":56,"index":190,"Rank":57,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.85,"AmazonReviewsClassification (zh)":35.7,"IFlyTek":40.46,"JDReview":78.26,"MassiveIntentClassification (zh-CN)":63.75,"MassiveScenarioClassification (zh-CN)":72.39,"MultilingualSentiment":63.17,"OnlineShopping":87.11,"TNews":44.15,"Waimai":80.65} -{"level_0":57,"index":49,"Rank":58,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":62.7,"AmazonReviewsClassification (zh)":38.25,"IFlyTek":43.13,"JDReview":69.08,"MassiveIntentClassification (zh-CN)":61.23,"MassiveScenarioClassification (zh-CN)":68.12,"MultilingualSentiment":67.83,"OnlineShopping":88.13,"TNews":44.42,"Waimai":84.15} -{"level_0":58,"index":48,"Rank":59,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.54,"AmazonReviewsClassification (zh)":37.51,"IFlyTek":44.88,"JDReview":82.2,"MassiveIntentClassification (zh-CN)":57.34,"MassiveScenarioClassification (zh-CN)":62.36,"MultilingualSentiment":66.58,"OnlineShopping":88.19,"TNews":39.8,"Waimai":83.96} 
-{"level_0":59,"index":277,"Rank":60,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":62.19,"AmazonReviewsClassification (zh)":34.12,"IFlyTek":42.05,"JDReview":82.14,"MassiveIntentClassification (zh-CN)":63.98,"MassiveScenarioClassification (zh-CN)":70.52,"MultilingualSentiment":60.98,"OnlineShopping":85.69,"TNews":43.01,"Waimai":77.22} -{"level_0":60,"index":279,"Rank":61,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":61.0,"AmazonReviewsClassification (zh)":34.46,"IFlyTek":41.75,"JDReview":79.68,"MassiveIntentClassification (zh-CN)":57.47,"MassiveScenarioClassification (zh-CN)":65.32,"MultilingualSentiment":61.21,"OnlineShopping":84.3,"TNews":45.22,"Waimai":79.57} -{"level_0":61,"index":53,"Rank":62,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":60.66,"AmazonReviewsClassification (zh)":33.77,"IFlyTek":41.54,"JDReview":81.56,"MassiveIntentClassification (zh-CN)":63.23,"MassiveScenarioClassification (zh-CN)":68.45,"MultilingualSentiment":58.97,"OnlineShopping":83.51,"TNews":38.92,"Waimai":76.01} -{"level_0":62,"index":159,"Rank":63,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.87,"AmazonReviewsClassification (zh)":31.91,"IFlyTek":38.01,"JDReview":69.59,"MassiveIntentClassification (zh-CN)":62.08,"MassiveScenarioClassification (zh-CN)":68.88,"MultilingualSentiment":57.69,"OnlineShopping":75.64,"TNews":40.95,"Waimai":76.12} -{"level_0":63,"index":289,"Rank":64,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":38.04,"AmazonReviewsClassification (zh)":21.96,"IFlyTek":20.35,"JDReview":55.5,"MassiveIntentClassification (zh-CN)":25.39,"MassiveScenarioClassification (zh-CN)":40.35,"MultilingualSentiment":38.94,"OnlineShopping":56.89,"TNews":26.79,"Waimai":56.22} -{"level_0":64,"index":11,"Rank":76,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AmazonReviewsClassification (zh)":30.89,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":40.62,"MassiveScenarioClassification (zh-CN)":50.22,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":65,"index":67,"Rank":110,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":22.35,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":18.85,"MassiveScenarioClassification (zh-CN)":30.14,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":66,"index":91,"Rank":134,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":24.27,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":46.24,"MassiveScenarioClassification (zh-CN)":49.38,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":67,"index":92,"Rank":135,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":23.98,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":45.78,"MassiveScenarioClassification (zh-CN)":48.55,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} 
-{"level_0":68,"index":95,"Rank":138,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":33.75,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":69,"index":126,"Rank":167,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":36.07,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":63.4,"MassiveScenarioClassification (zh-CN)":69.58,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":70,"index":138,"Rank":178,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":37.63,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":71,"index":139,"Rank":179,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":32.63,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":67.07,"MassiveScenarioClassification (zh-CN)":73.95,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":72,"index":185,"Rank":212,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":44.66,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":73.32,"MassiveScenarioClassification (zh-CN)":78.7,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":73,"index":189,"Rank":214,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":"","IFlyTek":45.03,"JDReview":83.64,"MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":67.17,"OnlineShopping":90.41,"TNews":47.7,"Waimai":84.07} -{"level_0":74,"index":191,"Rank":215,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":"","IFlyTek":46.23,"JDReview":84.17,"MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":68.36,"OnlineShopping":90.93,"TNews":47.8,"Waimai":84.68} -{"level_0":75,"index":254,"Rank":266,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AmazonReviewsClassification (zh)":36.45,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":63.86,"MassiveScenarioClassification (zh-CN)":70.85,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":76,"index":255,"Rank":267,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AmazonReviewsClassification (zh)":22.99,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":23.74,"MassiveScenarioClassification (zh-CN)":33.18,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":77,"index":258,"Rank":270,"Model":"allenai-specter<\/a>","Model Size (Million 
Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonReviewsClassification (zh)":20.49,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":2.81,"MassiveScenarioClassification (zh-CN)":9.19,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":78,"index":261,"Rank":273,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AmazonReviewsClassification (zh)":33.89,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":59.22,"MassiveScenarioClassification (zh-CN)":66.44,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":79,"index":263,"Rank":275,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonReviewsClassification (zh)":21.83,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":2.41,"MassiveScenarioClassification (zh-CN)":3.84,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":80,"index":264,"Rank":276,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AmazonReviewsClassification (zh)":21.89,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":1.72,"MassiveScenarioClassification (zh-CN)":5.21,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":81,"index":268,"Rank":280,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonReviewsClassification (zh)":35.26,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":82,"index":269,"Rank":281,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonReviewsClassification (zh)":37.74,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":"","MassiveScenarioClassification (zh-CN)":"","MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":83,"index":270,"Rank":282,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonReviewsClassification (zh)":21.53,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":1.12,"MassiveScenarioClassification (zh-CN)":4.17,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":84,"index":271,"Rank":283,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonReviewsClassification (zh)":22.12,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":1.09,"MassiveScenarioClassification (zh-CN)":4.7,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":85,"index":272,"Rank":284,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AmazonReviewsClassification (zh)":21.88,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":0.59,"MassiveScenarioClassification (zh-CN)":5.86,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":86,"index":275,"Rank":286,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification 
(zh)":"","IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":5.15,"MassiveScenarioClassification (zh-CN)":10.56,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} -{"level_0":87,"index":278,"Rank":287,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonReviewsClassification (zh)":32.52,"IFlyTek":"","JDReview":"","MassiveIntentClassification (zh-CN)":60.86,"MassiveScenarioClassification (zh-CN)":65.83,"MultilingualSentiment":"","OnlineShopping":"","TNews":"","Waimai":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.83,"IFlyTek":45.47,"IFlyTek (cmn-Hans)":41.86,"JDReview":80.99,"JDReview (cmn-Hans)":80.54,"MultilingualSentiment":68.58,"MultilingualSentiment (cmn-Hans)":70.81,"OnlineShopping":90.81,"OnlineShopping (cmn-Hans)":90.45,"TNews":48.38,"TNews (cmn-Hans)":48.8,"Waimai":85.02,"Waimai (cmn-Hans)":86.3} +{"Rank":2,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":68.08,"IFlyTek":47.35,"IFlyTek (cmn-Hans)":40.74,"JDReview":79.34,"JDReview (cmn-Hans)":78.37,"MultilingualSentiment":64.74,"MultilingualSentiment (cmn-Hans)":66.0,"OnlineShopping":88.73,"OnlineShopping (cmn-Hans)":88.7,"TNews":48.38,"TNews (cmn-Hans)":46.6,"Waimai":83.9,"Waimai (cmn-Hans)":84.15} +{"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.71,"IFlyTek":44.93,"IFlyTek (cmn-Hans)":40.81,"JDReview":76.21,"JDReview (cmn-Hans)":75.72,"MultilingualSentiment":65.28,"MultilingualSentiment (cmn-Hans)":67.56,"OnlineShopping":88.4,"OnlineShopping (cmn-Hans)":88.66,"TNews":47.06,"TNews (cmn-Hans)":47.52,"Waimai":84.42,"Waimai (cmn-Hans)":85.98} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai 
(cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":51.8,"IFlyTek (cmn-Hans)":null,"JDReview":86.02,"JDReview 
(cmn-Hans)":null,"MultilingualSentiment":76.35,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":93.2,"OnlineShopping (cmn-Hans)":null,"TNews":53.06,"TNews (cmn-Hans)":null,"Waimai":88.1,"Waimai (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"IFlyTek":53.77,"IFlyTek (cmn-Hans)":null,"JDReview":88.2,"JDReview (cmn-Hans)":null,"MultilingualSentiment":77.42,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":94.48,"OnlineShopping (cmn-Hans)":null,"TNews":51.24,"TNews (cmn-Hans)":null,"Waimai":88.63,"Waimai (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":48.62,"IFlyTek (cmn-Hans)":null,"JDReview":83.62,"JDReview (cmn-Hans)":null,"MultilingualSentiment":70.67,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":91.26,"OnlineShopping (cmn-Hans)":null,"TNews":51.08,"TNews (cmn-Hans)":null,"Waimai":85.36,"Waimai (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"IFlyTek":45.32,"IFlyTek (cmn-Hans)":null,"JDReview":85.38,"JDReview (cmn-Hans)":null,"MultilingualSentiment":73.7,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":91.66,"OnlineShopping (cmn-Hans)":null,"TNews":52.05,"TNews (cmn-Hans)":null,"Waimai":86.83,"Waimai (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"IFlyTek":48.74,"IFlyTek (cmn-Hans)":null,"JDReview":85.14,"JDReview (cmn-Hans)":null,"MultilingualSentiment":72.97,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":91.43,"OnlineShopping (cmn-Hans)":null,"TNews":52.1,"TNews (cmn-Hans)":null,"Waimai":86.9,"Waimai (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":null,"IFlyTek":45.49,"IFlyTek (cmn-Hans)":null,"JDReview":80.04,"JDReview (cmn-Hans)":null,"MultilingualSentiment":63.06,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":85.05,"OnlineShopping (cmn-Hans)":null,"TNews":48.15,"TNews (cmn-Hans)":null,"Waimai":83.18,"Waimai (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews 
(cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"IFlyTek":41.54,"IFlyTek (cmn-Hans)":null,"JDReview":81.56,"JDReview (cmn-Hans)":null,"MultilingualSentiment":58.97,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":83.51,"OnlineShopping (cmn-Hans)":null,"TNews":38.92,"TNews (cmn-Hans)":null,"Waimai":76.01,"Waimai (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":48.49,"JDReview":null,"JDReview (cmn-Hans)":84.02,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":68.13,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":86.99,"TNews":null,"TNews (cmn-Hans)":49.94,"Waimai":null,"Waimai (cmn-Hans)":84.92} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, 
fp32)":24.61,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview 
(cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} 
+{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview 
(cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":44.42,"IFlyTek (cmn-Hans)":null,"JDReview":85.33,"JDReview (cmn-Hans)":null,"MultilingualSentiment":71.9,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":87.77,"OnlineShopping (cmn-Hans)":null,"TNews":48.28,"TNews (cmn-Hans)":null,"Waimai":83.99,"Waimai (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":43.96,"IFlyTek (cmn-Hans)":null,"JDReview":86.92,"JDReview (cmn-Hans)":null,"MultilingualSentiment":72.47,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":89.59,"OnlineShopping (cmn-Hans)":null,"TNews":48.26,"TNews (cmn-Hans)":null,"Waimai":86.08,"Waimai (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} 
+{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":43.19,"JDReview":null,"JDReview (cmn-Hans)":79.14,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":64.6,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":85.63,"TNews":null,"TNews (cmn-Hans)":46.02,"Waimai":null,"Waimai (cmn-Hans)":82.85} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"IFlyTek":null,"IFlyTek 
(cmn-Hans)":15.31,"JDReview":null,"JDReview (cmn-Hans)":59.57,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":40.52,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":58.65,"TNews":null,"TNews (cmn-Hans)":20.37,"Waimai":null,"Waimai (cmn-Hans)":63.48} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":16.09,"JDReview":null,"JDReview (cmn-Hans)":59.98,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":41.28,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":57.74,"TNews":null,"TNews (cmn-Hans)":20.12,"Waimai":null,"Waimai (cmn-Hans)":62.72} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":17.18,"JDReview":null,"JDReview (cmn-Hans)":60.19,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":41.2,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":56.94,"TNews":null,"TNews (cmn-Hans)":21.05,"Waimai":null,"Waimai (cmn-Hans)":63.31} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews 
(cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":39.88,"JDReview":null,"JDReview (cmn-Hans)":70.26,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":61.9,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":84.89,"TNews":null,"TNews (cmn-Hans)":39.19,"Waimai":null,"Waimai (cmn-Hans)":82.27} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":43.98,"JDReview":null,"JDReview (cmn-Hans)":70.34,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":66.49,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":87.75,"TNews":null,"TNews (cmn-Hans)":43.73,"Waimai":null,"Waimai (cmn-Hans)":83.97} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"IFlyTek":42.05,"IFlyTek (cmn-Hans)":null,"JDReview":82.14,"JDReview (cmn-Hans)":null,"MultilingualSentiment":60.98,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":85.69,"OnlineShopping (cmn-Hans)":null,"TNews":43.01,"TNews (cmn-Hans)":null,"Waimai":77.22,"Waimai (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"IFlyTek":41.75,"IFlyTek (cmn-Hans)":null,"JDReview":79.68,"JDReview (cmn-Hans)":null,"MultilingualSentiment":61.21,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":84.3,"OnlineShopping (cmn-Hans)":null,"TNews":45.22,"TNews (cmn-Hans)":null,"Waimai":79.57,"Waimai (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment 
(cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":44.62,"IFlyTek (cmn-Hans)":null,"JDReview":74.6,"JDReview (cmn-Hans)":null,"MultilingualSentiment":67.99,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":88.94,"OnlineShopping (cmn-Hans)":null,"TNews":45.77,"TNews (cmn-Hans)":null,"Waimai":82.37,"Waimai (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} 
+{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"IFlyTek":50.75,"IFlyTek (cmn-Hans)":null,"JDReview":87.69,"JDReview (cmn-Hans)":null,"MultilingualSentiment":76.83,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":94.42,"OnlineShopping (cmn-Hans)":null,"TNews":52.62,"TNews (cmn-Hans)":null,"Waimai":88.77,"Waimai (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"IFlyTek":null,"IFlyTek (cmn-Hans)":null,"JDReview":null,"JDReview (cmn-Hans)":null,"MultilingualSentiment":null,"MultilingualSentiment (cmn-Hans)":null,"OnlineShopping":null,"OnlineShopping (cmn-Hans)":null,"TNews":null,"TNews (cmn-Hans)":null,"Waimai":null,"Waimai (cmn-Hans)":null} diff --git a/boards_data/zh/data_tasks/Clustering/default.jsonl b/boards_data/zh/data_tasks/Clustering/default.jsonl index 05d92e454653a865c9031006d75c45eb109a1ffc..4b0f6a34ef3968172fc3bc47dced6c1423bb7bfe 100644 --- a/boards_data/zh/data_tasks/Clustering/default.jsonl +++ b/boards_data/zh/data_tasks/Clustering/default.jsonl @@ -1,66 +1,112 @@ -{"level_0":0,"index":15,"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":67.08,"CLSClusteringP2P":47.21,"CLSClusteringS2S":45.79,"ThuNewsClusteringP2P":87.43,"ThuNewsClusteringS2S":87.9} -{"level_0":1,"index":276,"Rank":2,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.64,"CLSClusteringP2P":57.39,"CLSClusteringS2S":54.11,"ThuNewsClusteringP2P":79.15,"ThuNewsClusteringS2S":75.9} -{"level_0":2,"index":50,"Rank":3,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.35,"CLSClusteringP2P":60.2,"CLSClusteringS2S":58.4,"ThuNewsClusteringP2P":76.98,"ThuNewsClusteringS2S":69.83} -{"level_0":3,"index":17,"Rank":4,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":66.06,"CLSClusteringP2P":47.07,"CLSClusteringS2S":45.99,"ThuNewsClusteringP2P":86.08,"ThuNewsClusteringS2S":85.11} -{"level_0":4,"index":142,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.06,"CLSClusteringP2P":47.07,"CLSClusteringS2S":45.99,"ThuNewsClusteringP2P":86.08,"ThuNewsClusteringS2S":85.11} -{"level_0":5,"index":234,"Rank":6,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":66.06,"CLSClusteringP2P":47.07,"CLSClusteringS2S":45.99,"ThuNewsClusteringP2P":86.08,"ThuNewsClusteringS2S":85.11} -{"level_0":6,"index":207,"Rank":7,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.17,"CLSClusteringP2P":60.42,"CLSClusteringS2S":49.54,"ThuNewsClusteringP2P":78.76,"ThuNewsClusteringS2S":71.96} -{"level_0":7,"index":169,"Rank":8,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.23,"CLSClusteringP2P":58.29,"CLSClusteringS2S":48.79,"ThuNewsClusteringP2P":72.48,"ThuNewsClusteringS2S":69.35} -{"level_0":8,"index":253,"Rank":9,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.17,"CLSClusteringP2P":57.93,"CLSClusteringS2S":48.1,"ThuNewsClusteringP2P":74.2,"ThuNewsClusteringS2S":68.43} -{"level_0":9,"index":38,"Rank":10,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.98,"CLSClusteringP2P":56.35,"CLSClusteringS2S":48.49,"ThuNewsClusteringP2P":74.55,"ThuNewsClusteringS2S":68.55} -{"level_0":10,"index":29,"Rank":11,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.3,"CLSClusteringP2P":54.65,"CLSClusteringS2S":63.68,"ThuNewsClusteringP2P":64.32,"ThuNewsClusteringS2S":54.57} -{"level_0":11,"index":129,"Rank":12,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.7,"CLSClusteringP2P":47.08,"CLSClusteringS2S":44.05,"ThuNewsClusteringP2P":74.66,"ThuNewsClusteringS2S":69.0} -{"level_0":12,"index":315,"Rank":13,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.88,"CLSClusteringP2P":60.37,"CLSClusteringS2S":51.09,"ThuNewsClusteringP2P":58.23,"ThuNewsClusteringS2S":57.83} -{"level_0":13,"index":206,"Rank":14,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.62,"CLSClusteringP2P":43.24,"CLSClusteringS2S":41.23,"ThuNewsClusteringP2P":70.06,"ThuNewsClusteringS2S":63.94} -{"level_0":14,"index":16,"Rank":15,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.61,"CLSClusteringP2P":45.21,"CLSClusteringS2S":42.5,"ThuNewsClusteringP2P":68.24,"ThuNewsClusteringS2S":62.5} -{"level_0":15,"index":233,"Rank":16,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.61,"CLSClusteringP2P":45.21,"CLSClusteringS2S":42.5,"ThuNewsClusteringP2P":68.24,"ThuNewsClusteringS2S":62.5} -{"level_0":16,"index":116,"Rank":17,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"CLSClusteringP2P":43.03,"CLSClusteringS2S":40.42,"ThuNewsClusteringP2P":70.6,"ThuNewsClusteringS2S":63.21} -{"level_0":17,"index":155,"Rank":18,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.32,"CLSClusteringP2P":43.03,"CLSClusteringS2S":40.42,"ThuNewsClusteringP2P":70.6,"ThuNewsClusteringS2S":63.21} -{"level_0":18,"index":105,"Rank":19,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":54.0,"CLSClusteringP2P":42.52,"CLSClusteringS2S":39.73,"ThuNewsClusteringP2P":70.1,"ThuNewsClusteringS2S":63.67} -{"level_0":19,"index":154,"Rank":20,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.9,"CLSClusteringP2P":41.94,"CLSClusteringS2S":40.34,"ThuNewsClusteringP2P":69.61,"ThuNewsClusteringS2S":63.71} -{"level_0":20,"index":284,"Rank":21,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.86,"CLSClusteringP2P":42.58,"CLSClusteringS2S":40.42,"ThuNewsClusteringP2P":68.81,"ThuNewsClusteringS2S":63.61} -{"level_0":21,"index":12,"Rank":22,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.75,"CLSClusteringP2P":41.64,"CLSClusteringS2S":40.33,"ThuNewsClusteringP2P":69.28,"ThuNewsClusteringS2S":63.75} -{"level_0":22,"index":173,"Rank":23,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.3,"CLSClusteringP2P":42.86,"CLSClusteringS2S":39.98,"ThuNewsClusteringP2P":67.88,"ThuNewsClusteringS2S":62.47} -{"level_0":23,"index":286,"Rank":24,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.07,"CLSClusteringP2P":42.1,"CLSClusteringS2S":38.91,"ThuNewsClusteringP2P":68.36,"ThuNewsClusteringS2S":62.92} -{"level_0":24,"index":180,"Rank":25,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":52.3,"CLSClusteringP2P":44.42,"CLSClusteringS2S":42.58,"ThuNewsClusteringP2P":64.68,"ThuNewsClusteringS2S":57.53} -{"level_0":25,"index":46,"Rank":26,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.96,"CLSClusteringP2P":40.24,"CLSClusteringS2S":39.17,"ThuNewsClusteringP2P":65.58,"ThuNewsClusteringS2S":58.84} -{"level_0":26,"index":47,"Rank":27,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.8,"CLSClusteringP2P":40.79,"CLSClusteringS2S":38.43,"ThuNewsClusteringP2P":65.15,"ThuNewsClusteringS2S":58.82} -{"level_0":27,"index":26,"Rank":28,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":50.01,"CLSClusteringP2P":41.23,"CLSClusteringS2S":40.04,"ThuNewsClusteringP2P":62.03,"ThuNewsClusteringS2S":56.75} -{"level_0":28,"index":51,"Rank":29,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.48,"CLSClusteringP2P":39.51,"CLSClusteringS2S":38.0,"ThuNewsClusteringP2P":62.57,"ThuNewsClusteringS2S":57.84} -{"level_0":29,"index":172,"Rank":30,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.4,"CLSClusteringP2P":40.26,"CLSClusteringS2S":37.18,"ThuNewsClusteringP2P":62.19,"ThuNewsClusteringS2S":57.97} -{"level_0":30,"index":175,"Rank":31,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.16,"CLSClusteringP2P":39.95,"CLSClusteringS2S":38.18,"ThuNewsClusteringP2P":61.4,"ThuNewsClusteringS2S":57.11} -{"level_0":31,"index":21,"Rank":32,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.04,"CLSClusteringP2P":40.03,"CLSClusteringS2S":38.19,"ThuNewsClusteringP2P":60.85,"ThuNewsClusteringS2S":57.09} 
-{"level_0":32,"index":20,"Rank":33,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.0,"CLSClusteringP2P":39.96,"CLSClusteringS2S":38.19,"ThuNewsClusteringP2P":60.74,"ThuNewsClusteringS2S":57.09} -{"level_0":33,"index":27,"Rank":34,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":48.99,"CLSClusteringP2P":41.44,"CLSClusteringS2S":38.33,"ThuNewsClusteringP2P":59.61,"ThuNewsClusteringS2S":56.58} -{"level_0":34,"index":288,"Rank":35,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.95,"CLSClusteringP2P":39.54,"CLSClusteringS2S":37.33,"ThuNewsClusteringP2P":63.79,"ThuNewsClusteringS2S":55.14} -{"level_0":35,"index":41,"Rank":36,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.88,"CLSClusteringP2P":42.46,"CLSClusteringS2S":40.38,"ThuNewsClusteringP2P":58.54,"ThuNewsClusteringS2S":54.15} -{"level_0":36,"index":221,"Rank":37,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":48.88,"CLSClusteringP2P":38.6,"CLSClusteringS2S":38.02,"ThuNewsClusteringP2P":60.39,"ThuNewsClusteringS2S":58.51} -{"level_0":37,"index":40,"Rank":38,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.81,"CLSClusteringP2P":42.84,"CLSClusteringS2S":39.77,"ThuNewsClusteringP2P":58.93,"ThuNewsClusteringS2S":53.71} -{"level_0":38,"index":171,"Rank":39,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.7,"CLSClusteringP2P":39.71,"CLSClusteringS2S":36.56,"ThuNewsClusteringP2P":63.19,"ThuNewsClusteringS2S":55.36} -{"level_0":39,"index":174,"Rank":40,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.65,"CLSClusteringP2P":40.27,"CLSClusteringS2S":37.5,"ThuNewsClusteringP2P":62.15,"ThuNewsClusteringS2S":54.67} -{"level_0":40,"index":226,"Rank":41,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.64,"CLSClusteringP2P":39.32,"CLSClusteringS2S":37.84,"ThuNewsClusteringP2P":60.69,"ThuNewsClusteringS2S":56.73} -{"level_0":41,"index":208,"Rank":42,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.35,"CLSClusteringP2P":33.55,"CLSClusteringS2S":36.18,"ThuNewsClusteringP2P":63.34,"ThuNewsClusteringS2S":60.33} -{"level_0":42,"index":184,"Rank":43,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.23,"CLSClusteringP2P":40.68,"CLSClusteringS2S":38.59,"ThuNewsClusteringP2P":58.05,"ThuNewsClusteringS2S":55.59} -{"level_0":43,"index":274,"Rank":44,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":48.05,"CLSClusteringP2P":38.88,"CLSClusteringS2S":39.09,"ThuNewsClusteringP2P":59.51,"ThuNewsClusteringS2S":54.72} -{"level_0":44,"index":220,"Rank":45,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.68,"CLSClusteringP2P":39.81,"CLSClusteringS2S":37.34,"ThuNewsClusteringP2P":59.77,"ThuNewsClusteringS2S":53.78} -{"level_0":45,"index":23,"Rank":46,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, 
fp32)":0.38,"Average":47.53,"CLSClusteringP2P":39.91,"CLSClusteringS2S":37.63,"ThuNewsClusteringP2P":58.45,"ThuNewsClusteringS2S":54.12} -{"level_0":46,"index":39,"Rank":47,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.21,"CLSClusteringP2P":42.21,"CLSClusteringS2S":40.33,"ThuNewsClusteringP2P":55.81,"ThuNewsClusteringS2S":50.5} -{"level_0":47,"index":251,"Rank":48,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.12,"CLSClusteringP2P":38.36,"CLSClusteringS2S":35.65,"ThuNewsClusteringP2P":61.44,"ThuNewsClusteringS2S":53.02} -{"level_0":48,"index":252,"Rank":49,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":47.04,"CLSClusteringP2P":38.98,"CLSClusteringS2S":36.04,"ThuNewsClusteringP2P":60.58,"ThuNewsClusteringS2S":52.56} -{"level_0":49,"index":199,"Rank":50,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.47,"CLSClusteringP2P":39.97,"CLSClusteringS2S":38.4,"ThuNewsClusteringP2P":54.08,"ThuNewsClusteringS2S":53.42} -{"level_0":50,"index":280,"Rank":51,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.47,"CLSClusteringP2P":39.97,"CLSClusteringS2S":38.4,"ThuNewsClusteringP2P":54.08,"ThuNewsClusteringS2S":53.42} -{"level_0":51,"index":311,"Rank":52,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.68,"CLSClusteringP2P":38.26,"CLSClusteringS2S":35.91,"ThuNewsClusteringP2P":58.71,"ThuNewsClusteringS2S":49.86} -{"level_0":52,"index":186,"Rank":53,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":45.26,"CLSClusteringP2P":39.14,"CLSClusteringS2S":37.79,"ThuNewsClusteringP2P":55.18,"ThuNewsClusteringS2S":48.93} -{"level_0":53,"index":279,"Rank":54,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":44.39,"CLSClusteringP2P":37.01,"CLSClusteringS2S":33.46,"ThuNewsClusteringP2P":58.83,"ThuNewsClusteringS2S":48.26} -{"level_0":54,"index":33,"Rank":55,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":44.18,"CLSClusteringP2P":38.14,"CLSClusteringS2S":35.14,"ThuNewsClusteringP2P":54.22,"ThuNewsClusteringS2S":49.22} -{"level_0":55,"index":189,"Rank":56,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.24,"CLSClusteringP2P":36.73,"CLSClusteringS2S":36.45,"ThuNewsClusteringP2P":50.24,"ThuNewsClusteringS2S":49.54} -{"level_0":56,"index":190,"Rank":57,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.42,"CLSClusteringP2P":35.21,"CLSClusteringS2S":35.12,"ThuNewsClusteringP2P":51.03,"ThuNewsClusteringS2S":48.32} -{"level_0":57,"index":188,"Rank":58,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.42,"CLSClusteringP2P":35.36,"CLSClusteringS2S":37.07,"ThuNewsClusteringP2P":48.91,"ThuNewsClusteringS2S":48.34} -{"level_0":58,"index":191,"Rank":59,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.32,"CLSClusteringP2P":36.0,"CLSClusteringS2S":36.35,"ThuNewsClusteringP2P":49.19,"ThuNewsClusteringS2S":47.76} 
-{"level_0":59,"index":183,"Rank":60,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":40.68,"CLSClusteringP2P":32.41,"CLSClusteringS2S":36.99,"ThuNewsClusteringP2P":40.98,"ThuNewsClusteringS2S":52.36} -{"level_0":60,"index":49,"Rank":61,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.67,"CLSClusteringP2P":39.24,"CLSClusteringS2S":35.36,"ThuNewsClusteringP2P":47.3,"ThuNewsClusteringS2S":36.77} -{"level_0":61,"index":277,"Rank":62,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":37.66,"CLSClusteringP2P":35.27,"CLSClusteringS2S":32.42,"ThuNewsClusteringP2P":42.92,"ThuNewsClusteringS2S":40.01} -{"level_0":62,"index":159,"Rank":63,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":34.44,"CLSClusteringP2P":27.43,"CLSClusteringS2S":28.32,"ThuNewsClusteringP2P":38.54,"ThuNewsClusteringS2S":43.45} -{"level_0":63,"index":48,"Rank":64,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.8,"CLSClusteringP2P":34.98,"CLSClusteringS2S":27.82,"ThuNewsClusteringP2P":40.17,"ThuNewsClusteringS2S":24.23} -{"level_0":64,"index":53,"Rank":65,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":30.02,"CLSClusteringP2P":30.13,"CLSClusteringS2S":28.77,"ThuNewsClusteringP2P":35.05,"ThuNewsClusteringS2S":26.14} -{"level_0":65,"index":289,"Rank":66,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":18.34,"CLSClusteringP2P":18.66,"CLSClusteringS2S":16.82,"ThuNewsClusteringP2P":20.69,"ThuNewsClusteringS2S":17.19} +{"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":67.08,"CLSClusteringP2P":47.21,"CLSClusteringS2S":45.79,"ThuNewsClusteringP2P":87.43,"ThuNewsClusteringS2S":87.9} +{"Rank":2,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.88,"CLSClusteringP2P":60.37,"CLSClusteringS2S":51.09,"ThuNewsClusteringP2P":58.23,"ThuNewsClusteringS2S":57.83} +{"Rank":3,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.75,"CLSClusteringP2P":41.64,"CLSClusteringS2S":40.33,"ThuNewsClusteringP2P":69.28,"ThuNewsClusteringS2S":63.75} +{"Rank":4,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":50.01,"CLSClusteringP2P":41.23,"CLSClusteringS2S":40.04,"ThuNewsClusteringP2P":62.03,"ThuNewsClusteringS2S":56.75} +{"Rank":5,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":48.99,"CLSClusteringP2P":41.44,"CLSClusteringS2S":38.33,"ThuNewsClusteringP2P":59.61,"ThuNewsClusteringS2S":56.58} +{"Rank":6,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":48.88,"CLSClusteringP2P":38.6,"CLSClusteringS2S":38.02,"ThuNewsClusteringP2P":60.39,"ThuNewsClusteringS2S":58.51} +{"Rank":7,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.23,"CLSClusteringP2P":40.68,"CLSClusteringS2S":38.59,"ThuNewsClusteringP2P":58.05,"ThuNewsClusteringS2S":55.59} 
+{"Rank":8,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.68,"CLSClusteringP2P":39.81,"CLSClusteringS2S":37.34,"ThuNewsClusteringP2P":59.77,"ThuNewsClusteringS2S":53.78} +{"Rank":9,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":47.53,"CLSClusteringP2P":39.91,"CLSClusteringS2S":37.63,"ThuNewsClusteringP2P":58.45,"ThuNewsClusteringS2S":54.12} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.68,"CLSClusteringP2P":38.26,"CLSClusteringS2S":35.91,"ThuNewsClusteringP2P":58.71,"ThuNewsClusteringS2S":49.86} +{"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":45.26,"CLSClusteringP2P":39.14,"CLSClusteringS2S":37.79,"ThuNewsClusteringP2P":55.18,"ThuNewsClusteringS2S":48.93} +{"Rank":12,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":44.39,"CLSClusteringP2P":37.01,"CLSClusteringS2S":33.46,"ThuNewsClusteringP2P":58.83,"ThuNewsClusteringS2S":48.26} +{"Rank":13,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":44.18,"CLSClusteringP2P":38.14,"CLSClusteringS2S":35.14,"ThuNewsClusteringP2P":54.22,"ThuNewsClusteringS2S":49.22} +{"Rank":14,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":40.68,"CLSClusteringP2P":32.41,"CLSClusteringS2S":36.99,"ThuNewsClusteringP2P":40.98,"ThuNewsClusteringS2S":52.36} +{"Rank":15,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":37.66,"CLSClusteringP2P":35.27,"CLSClusteringS2S":32.42,"ThuNewsClusteringP2P":42.92,"ThuNewsClusteringS2S":40.01} +{"Rank":16,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":30.02,"CLSClusteringP2P":30.13,"CLSClusteringS2S":28.77,"ThuNewsClusteringP2P":35.05,"ThuNewsClusteringS2S":26.14} +{"Rank":17,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":18,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":19,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":20,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":21,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":22,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":23,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":24,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":25,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":26,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":27,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":28,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":29,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":30,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":31,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":32,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":33,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":34,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":35,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":36,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":37,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory 
Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":38,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":39,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":40,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":41,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":42,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":43,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":44,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":45,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":46,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":47,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":48,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":49,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":50,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":51,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} 
+{"Rank":52,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":53,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":54,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":55,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":56,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":57,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":58,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":59,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":60,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":61,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":62,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":63,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":64,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":65,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":66,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} 
+{"Rank":67,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":68,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":69,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":70,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":71,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":72,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":73,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":74,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":75,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":76,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":77,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":78,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":79,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":80,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":81,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":82,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":83,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":84,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":85,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":86,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":87,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":88,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":89,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":90,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":91,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":92,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":93,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":94,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":95,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":96,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, 
fp32)":4.62,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":97,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":98,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":99,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":100,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":102,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":103,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":104,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":105,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":106,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":107,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":108,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":109,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":110,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} 
+{"Rank":111,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CLSClusteringP2P":null,"CLSClusteringS2S":null,"ThuNewsClusteringP2P":null,"ThuNewsClusteringS2S":null} diff --git a/boards_data/zh/data_tasks/PairClassification/default.jsonl b/boards_data/zh/data_tasks/PairClassification/default.jsonl index 6f5614c6a0c0a0f6ade267b510b7fb93556ab305..b17c2fe3485a482025108f54e4e5ba67d169f09f 100644 --- a/boards_data/zh/data_tasks/PairClassification/default.jsonl +++ b/boards_data/zh/data_tasks/PairClassification/default.jsonl @@ -1,66 +1,112 @@ -{"level_0":0,"index":207,"Rank":1,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":91.87,"Cmnli":92.84,"Ocnli":90.9} -{"level_0":1,"index":169,"Rank":2,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":91.55,"Cmnli":92.54,"Ocnli":90.56} -{"level_0":2,"index":50,"Rank":3,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.87,"Cmnli":90.87,"Ocnli":90.87} -{"level_0":3,"index":253,"Rank":4,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":90.24,"Cmnli":92.64,"Ocnli":87.84} -{"level_0":4,"index":38,"Rank":5,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":89.88,"Cmnli":92.49,"Ocnli":87.26} -{"level_0":5,"index":46,"Rank":6,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.92,"Cmnli":90.66,"Ocnli":87.18} -{"level_0":6,"index":15,"Rank":7,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":88.52,"Cmnli":91.81,"Ocnli":85.22} -{"level_0":7,"index":154,"Rank":8,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.1,"Cmnli":90.77,"Ocnli":85.44} -{"level_0":8,"index":12,"Rank":9,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.1,"Cmnli":90.77,"Ocnli":85.44} -{"level_0":9,"index":155,"Rank":10,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.08,"Cmnli":90.66,"Ocnli":85.51} -{"level_0":10,"index":116,"Rank":11,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.08,"Cmnli":90.66,"Ocnli":85.51} -{"level_0":11,"index":173,"Rank":12,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.93,"Cmnli":90.42,"Ocnli":85.43} -{"level_0":12,"index":129,"Rank":13,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.84,"Cmnli":90.49,"Ocnli":85.18} -{"level_0":13,"index":47,"Rank":14,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.57,"Cmnli":90.29,"Ocnli":84.85} -{"level_0":14,"index":17,"Rank":15,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, 
fp32)":28.36,"Average":87.48,"Cmnli":90.18,"Ocnli":84.78} -{"level_0":15,"index":142,"Rank":16,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.48,"Cmnli":90.18,"Ocnli":84.78} -{"level_0":16,"index":234,"Rank":17,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.48,"Cmnli":90.18,"Ocnli":84.78} -{"level_0":17,"index":276,"Rank":18,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":87.08,"Cmnli":89.42,"Ocnli":84.74} -{"level_0":18,"index":16,"Rank":19,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.91,"Cmnli":86.22,"Ocnli":87.6} -{"level_0":19,"index":233,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.91,"Cmnli":86.22,"Ocnli":87.6} -{"level_0":20,"index":29,"Rank":21,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":86.67,"Cmnli":90.13,"Ocnli":83.21} -{"level_0":21,"index":206,"Rank":22,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":85.3,"Cmnli":89.95,"Ocnli":80.64} -{"level_0":22,"index":286,"Rank":23,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.41,"Cmnli":89.5,"Ocnli":79.32} -{"level_0":23,"index":105,"Rank":24,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":84.34,"Cmnli":89.36,"Ocnli":79.31} -{"level_0":24,"index":199,"Rank":25,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.94,"Cmnli":85.27,"Ocnli":80.62} -{"level_0":25,"index":280,"Rank":26,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.94,"Cmnli":85.27,"Ocnli":80.62} -{"level_0":26,"index":175,"Rank":27,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.68,"Cmnli":86.85,"Ocnli":78.51} -{"level_0":27,"index":20,"Rank":28,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.68,"Cmnli":86.85,"Ocnli":78.5} -{"level_0":28,"index":21,"Rank":29,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.68,"Cmnli":86.85,"Ocnli":78.5} -{"level_0":29,"index":40,"Rank":30,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.38,"Cmnli":87.3,"Ocnli":77.46} -{"level_0":30,"index":41,"Rank":31,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.38,"Cmnli":87.55,"Ocnli":77.21} -{"level_0":31,"index":315,"Rank":32,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.32,"Cmnli":85.31,"Ocnli":79.33} -{"level_0":32,"index":27,"Rank":33,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":81.6,"Cmnli":85.27,"Ocnli":77.94} -{"level_0":33,"index":48,"Rank":34,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":81.46,"Cmnli":85.92,"Ocnli":77.01} 
-{"level_0":34,"index":49,"Rank":35,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":80.9,"Cmnli":86.7,"Ocnli":75.1} -{"level_0":35,"index":284,"Rank":36,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.44,"Cmnli":85.6,"Ocnli":75.28} -{"level_0":36,"index":51,"Rank":37,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":80.01,"Cmnli":84.76,"Ocnli":75.26} -{"level_0":37,"index":172,"Rank":38,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":79.96,"Cmnli":84.75,"Ocnli":75.16} -{"level_0":38,"index":23,"Rank":39,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":79.76,"Cmnli":84.1,"Ocnli":75.41} -{"level_0":39,"index":39,"Rank":40,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.77,"Cmnli":84.1,"Ocnli":73.44} -{"level_0":40,"index":174,"Rank":41,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.72,"Cmnli":85.14,"Ocnli":72.29} -{"level_0":41,"index":191,"Rank":42,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.65,"Cmnli":85.14,"Ocnli":72.15} -{"level_0":42,"index":252,"Rank":43,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":78.38,"Cmnli":83.83,"Ocnli":72.92} -{"level_0":43,"index":26,"Rank":44,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":76.77,"Cmnli":82.17,"Ocnli":71.37} -{"level_0":44,"index":251,"Rank":45,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.61,"Cmnli":82.33,"Ocnli":70.89} -{"level_0":45,"index":189,"Rank":46,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.59,"Cmnli":83.32,"Ocnli":69.86} -{"level_0":46,"index":171,"Rank":47,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.09,"Cmnli":81.65,"Ocnli":70.53} -{"level_0":47,"index":188,"Rank":48,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.96,"Cmnli":81.34,"Ocnli":68.58} -{"level_0":48,"index":208,"Rank":49,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.64,"Cmnli":79.58,"Ocnli":69.7} -{"level_0":49,"index":190,"Rank":50,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.38,"Cmnli":78.43,"Ocnli":66.32} -{"level_0":50,"index":180,"Rank":51,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":72.19,"Cmnli":80.21,"Ocnli":64.18} -{"level_0":51,"index":53,"Rank":52,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.86,"Cmnli":77.67,"Ocnli":64.04} -{"level_0":52,"index":33,"Rank":53,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":70.4,"Cmnli":76.24,"Ocnli":64.57} -{"level_0":53,"index":288,"Rank":54,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":70.0,"Cmnli":76.46,"Ocnli":63.54} -{"level_0":54,"index":184,"Rank":55,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.89,"Cmnli":78.18,"Ocnli":61.6} -{"level_0":55,"index":311,"Rank":56,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.56,"Cmnli":76.03,"Ocnli":63.08} -{"level_0":56,"index":274,"Rank":57,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.76,"Cmnli":75.67,"Ocnli":61.85} -{"level_0":57,"index":277,"Rank":58,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":67.41,"Cmnli":73.87,"Ocnli":60.95} -{"level_0":58,"index":183,"Rank":59,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.07,"Cmnli":74.51,"Ocnli":59.63} -{"level_0":59,"index":279,"Rank":60,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":66.62,"Cmnli":72.55,"Ocnli":60.7} -{"level_0":60,"index":186,"Rank":61,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.45,"Cmnli":72.12,"Ocnli":60.77} -{"level_0":61,"index":221,"Rank":62,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":64.3,"Cmnli":69.27,"Ocnli":59.33} -{"level_0":62,"index":220,"Rank":63,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":63.99,"Cmnli":69.98,"Ocnli":58.0} -{"level_0":63,"index":226,"Rank":64,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.84,"Cmnli":65.68,"Ocnli":59.99} -{"level_0":64,"index":159,"Rank":65,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.52,"Cmnli":61.86,"Ocnli":55.18} -{"level_0":65,"index":289,"Rank":66,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.7,"Cmnli":57.78,"Ocnli":55.63} +{"Rank":1,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":88.52,"Cmnli":91.81,"Ocnli":85.22} +{"Rank":2,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":88.1,"Cmnli":90.77,"Ocnli":85.44} +{"Rank":3,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":82.32,"Cmnli":85.31,"Ocnli":79.33} +{"Rank":4,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":81.6,"Cmnli":85.27,"Ocnli":77.94} +{"Rank":5,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":79.76,"Cmnli":84.1,"Ocnli":75.41} +{"Rank":6,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":76.77,"Cmnli":82.17,"Ocnli":71.37} +{"Rank":7,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.86,"Cmnli":77.67,"Ocnli":64.04} +{"Rank":8,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":70.4,"Cmnli":76.24,"Ocnli":64.57} 
+{"Rank":9,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.89,"Cmnli":78.18,"Ocnli":61.6} +{"Rank":10,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.56,"Cmnli":76.03,"Ocnli":63.08} +{"Rank":11,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":67.41,"Cmnli":73.87,"Ocnli":60.95} +{"Rank":12,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.07,"Cmnli":74.51,"Ocnli":59.63} +{"Rank":13,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":66.62,"Cmnli":72.55,"Ocnli":60.7} +{"Rank":14,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.45,"Cmnli":72.12,"Ocnli":60.77} +{"Rank":15,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":64.3,"Cmnli":69.27,"Ocnli":59.33} +{"Rank":16,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":63.99,"Cmnli":69.98,"Ocnli":58.0} +{"Rank":17,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":18,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":19,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":20,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":21,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":22,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":23,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":24,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":25,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":26,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":27,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":28,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":29,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":30,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":31,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":32,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":33,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":34,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":35,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":36,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":37,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":38,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":39,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":40,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":41,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":42,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":43,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":44,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":45,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":46,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":47,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":48,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":49,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":50,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":51,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":52,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"Cmnli":null,"Ocnli":null} 
+{"Rank":53,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":54,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":55,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":56,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":57,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":58,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":59,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":60,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":61,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":62,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":63,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":64,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":65,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":66,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":67,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":68,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":69,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":70,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":71,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":72,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":73,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":74,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":75,"Model":"contriever-base-msmarco<\/a>","Model Size (Million 
Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":76,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":77,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":78,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":79,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":80,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":81,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":82,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":83,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":84,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":85,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":86,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":87,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":88,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":89,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":90,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":91,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":92,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":93,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":94,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":95,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":96,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":97,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, 
fp32)":18.12,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":98,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":99,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":100,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":101,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":102,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":103,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":104,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":105,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":106,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":107,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":108,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":109,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":110,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":111,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"Cmnli":null,"Ocnli":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"Cmnli":null,"Ocnli":null} diff --git a/boards_data/zh/data_tasks/Reranking/default.jsonl b/boards_data/zh/data_tasks/Reranking/default.jsonl index 72b55a24fe05307b903bb37449952b47121cbd36..8918e6f04e47bac0390bfc43a02c2a9f68bbee52 100644 --- a/boards_data/zh/data_tasks/Reranking/default.jsonl +++ b/boards_data/zh/data_tasks/Reranking/default.jsonl @@ -1,79 +1,112 @@ -{"level_0":0,"index":207,"Rank":1,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.58,"CMedQAv1":90.96,"CMedQAv2":90.41,"MMarcoReranking":39.91,"T2Reranking":69.03} -{"level_0":1,"index":169,"Rank":2,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.34,"CMedQAv1":91.11,"CMedQAv2":90.07,"MMarcoReranking":38.87,"T2Reranking":69.29} -{"level_0":2,"index":248,"Rank":3,"Model":"360Zhinao-1.8B-Reranking<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.13,"CMedQAv1":86.75,"CMedQAv2":87.92,"MMarcoReranking":37.29,"T2Reranking":68.55} 
-{"level_0":3,"index":253,"Rank":4,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.0,"CMedQAv1":89.31,"CMedQAv2":90.14,"MMarcoReranking":33.39,"T2Reranking":67.15} -{"level_0":4,"index":38,"Rank":5,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.78,"CMedQAv1":89.26,"CMedQAv2":90.05,"MMarcoReranking":32.74,"T2Reranking":67.05} -{"level_0":5,"index":315,"Rank":6,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.67,"CMedQAv1":88.06,"CMedQAv2":88.46,"MMarcoReranking":34.3,"T2Reranking":67.85} -{"level_0":6,"index":50,"Rank":7,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.3,"CMedQAv1":88.74,"CMedQAv2":89.42,"MMarcoReranking":31.61,"T2Reranking":67.45} -{"level_0":7,"index":249,"Rank":8,"Model":"360Zhinao-search<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.92,"CMedQAv1":87.0,"CMedQAv2":88.48,"MMarcoReranking":32.41,"T2Reranking":67.8} -{"level_0":8,"index":234,"Rank":9,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.92,"CMedQAv1":88.2,"CMedQAv2":88.03,"MMarcoReranking":31.65,"T2Reranking":67.8} -{"level_0":9,"index":17,"Rank":10,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":68.92,"CMedQAv1":88.2,"CMedQAv2":88.03,"MMarcoReranking":31.65,"T2Reranking":67.8} -{"level_0":10,"index":142,"Rank":11,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.92,"CMedQAv1":88.2,"CMedQAv2":88.03,"MMarcoReranking":31.65,"T2Reranking":67.8} -{"level_0":11,"index":276,"Rank":12,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.72,"CMedQAv1":89.37,"CMedQAv2":89.27,"MMarcoReranking":29.64,"T2Reranking":66.61} -{"level_0":12,"index":223,"Rank":13,"Model":"LdIR-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.58,"CMedQAv1":84.35,"CMedQAv2":86.45,"MMarcoReranking":35.64,"T2Reranking":67.86} -{"level_0":13,"index":116,"Rank":14,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"CMedQAv1":89.33,"CMedQAv2":89.18,"MMarcoReranking":28.85,"T2Reranking":66.43} -{"level_0":14,"index":155,"Rank":15,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.45,"CMedQAv1":89.33,"CMedQAv2":89.18,"MMarcoReranking":28.85,"T2Reranking":66.43} -{"level_0":15,"index":120,"Rank":16,"Model":"PEG<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.41,"CMedQAv1":84.09,"CMedQAv2":86.56,"MMarcoReranking":33.55,"T2Reranking":69.43} -{"level_0":16,"index":12,"Rank":17,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.27,"CMedQAv1":88.99,"CMedQAv2":89.6,"MMarcoReranking":28.12,"T2Reranking":66.38} -{"level_0":17,"index":154,"Rank":18,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.26,"CMedQAv1":88.99,"CMedQAv2":89.6,"MMarcoReranking":28.09,"T2Reranking":66.38} 
-{"level_0":18,"index":233,"Rank":19,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.21,"CMedQAv1":88.16,"CMedQAv2":88.12,"MMarcoReranking":29.14,"T2Reranking":67.43} -{"level_0":19,"index":16,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.21,"CMedQAv1":88.16,"CMedQAv2":88.12,"MMarcoReranking":29.14,"T2Reranking":67.43} -{"level_0":20,"index":129,"Rank":21,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.98,"CMedQAv1":88.66,"CMedQAv2":88.9,"MMarcoReranking":27.76,"T2Reranking":66.62} -{"level_0":21,"index":225,"Rank":22,"Model":"bge-reranker-large-1k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.87,"CMedQAv1":82.15,"CMedQAv2":84.19,"MMarcoReranking":37.64,"T2Reranking":67.48} -{"level_0":22,"index":173,"Rank":23,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.84,"CMedQAv1":88.34,"CMedQAv2":89.06,"MMarcoReranking":27.48,"T2Reranking":66.49} -{"level_0":23,"index":282,"Rank":24,"Model":"BAAI-bge-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.78,"CMedQAv1":82.14,"CMedQAv2":84.19,"MMarcoReranking":37.17,"T2Reranking":67.6} -{"level_0":24,"index":105,"Rank":25,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.61,"CMedQAv1":86.52,"CMedQAv2":87.11,"MMarcoReranking":30.63,"T2Reranking":66.18} -{"level_0":25,"index":106,"Rank":26,"Model":"alime-reranker-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.54,"CMedQAv1":82.32,"CMedQAv2":84.09,"MMarcoReranking":35.5,"T2Reranking":68.26} -{"level_0":26,"index":286,"Rank":27,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.4,"CMedQAv1":86.09,"CMedQAv2":86.46,"MMarcoReranking":31.19,"T2Reranking":65.86} -{"level_0":27,"index":206,"Rank":28,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.34,"CMedQAv1":87.12,"CMedQAv2":87.57,"MMarcoReranking":28.08,"T2Reranking":66.6} -{"level_0":28,"index":46,"Rank":29,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.17,"CMedQAv1":88.49,"CMedQAv2":89.18,"MMarcoReranking":24.76,"T2Reranking":66.26} -{"level_0":29,"index":31,"Rank":30,"Model":"bge-reranker-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"CMedQAv1":81.27,"CMedQAv2":84.1,"MMarcoReranking":35.46,"T2Reranking":67.28} -{"level_0":30,"index":30,"Rank":31,"Model":"bge-reranker-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"CMedQAv1":81.27,"CMedQAv2":84.1,"MMarcoReranking":35.46,"T2Reranking":67.28} -{"level_0":31,"index":144,"Rank":32,"Model":"bge-reranker-large-onnx<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.03,"CMedQAv1":81.27,"CMedQAv2":84.1,"MMarcoReranking":35.46,"T2Reranking":67.28} -{"level_0":32,"index":224,"Rank":33,"Model":"bge-reranker-base-1k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.02,"CMedQAv1":81.26,"CMedQAv2":84.11,"MMarcoReranking":35.46,"T2Reranking":67.25} 
-{"level_0":33,"index":284,"Rank":34,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.0,"CMedQAv1":86.79,"CMedQAv2":87.2,"MMarcoReranking":27.64,"T2Reranking":66.36} -{"level_0":34,"index":171,"Rank":35,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.95,"CMedQAv1":85.69,"CMedQAv2":86.46,"MMarcoReranking":29.2,"T2Reranking":66.46} -{"level_0":35,"index":47,"Rank":36,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.92,"CMedQAv1":86.78,"CMedQAv2":87.39,"MMarcoReranking":27.39,"T2Reranking":66.11} -{"level_0":36,"index":251,"Rank":37,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.68,"CMedQAv1":85.25,"CMedQAv2":86.15,"MMarcoReranking":28.87,"T2Reranking":66.46} -{"level_0":37,"index":280,"Rank":38,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.57,"CMedQAv1":83.64,"CMedQAv2":83.74,"MMarcoReranking":31.54,"T2Reranking":67.37} -{"level_0":38,"index":199,"Rank":39,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.57,"CMedQAv1":83.64,"CMedQAv2":83.74,"MMarcoReranking":31.54,"T2Reranking":67.37} -{"level_0":39,"index":175,"Rank":40,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.41,"CMedQAv1":85.45,"CMedQAv2":85.83,"MMarcoReranking":27.97,"T2Reranking":66.41} -{"level_0":40,"index":20,"Rank":41,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.39,"CMedQAv1":85.34,"CMedQAv2":85.87,"MMarcoReranking":27.96,"T2Reranking":66.38} -{"level_0":41,"index":15,"Rank":42,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":66.38,"CMedQAv1":86.37,"CMedQAv2":87.41,"MMarcoReranking":23.64,"T2Reranking":68.11} -{"level_0":42,"index":21,"Rank":43,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.38,"CMedQAv1":85.34,"CMedQAv2":85.81,"MMarcoReranking":27.97,"T2Reranking":66.38} -{"level_0":43,"index":288,"Rank":44,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.22,"CMedQAv1":86.08,"CMedQAv2":87.26,"MMarcoReranking":26.13,"T2Reranking":65.39} -{"level_0":44,"index":172,"Rank":45,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.1,"CMedQAv1":84.7,"CMedQAv2":85.31,"MMarcoReranking":28.05,"T2Reranking":66.35} -{"level_0":45,"index":125,"Rank":46,"Model":"cloudy-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.04,"CMedQAv1":86.1,"CMedQAv2":86.95,"MMarcoReranking":24.26,"T2Reranking":66.83} -{"level_0":46,"index":298,"Rank":47,"Model":"hktv-fine-tuned-cloudy-large-zh-metaphor14<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":66.04,"CMedQAv1":86.1,"CMedQAv2":86.95,"MMarcoReranking":24.26,"T2Reranking":66.83} -{"level_0":47,"index":174,"Rank":48,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.98,"CMedQAv1":84.69,"CMedQAv2":85.23,"MMarcoReranking":27.16,"T2Reranking":66.86} -{"level_0":48,"index":252,"Rank":49,"Model":"piccolo-large-zh<\/a>","Model Size 
(Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.98,"CMedQAv1":84.79,"CMedQAv2":84.89,"MMarcoReranking":27.27,"T2Reranking":66.96} -{"level_0":49,"index":51,"Rank":50,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.89,"CMedQAv1":84.67,"CMedQAv2":85.27,"MMarcoReranking":27.84,"T2Reranking":65.79} -{"level_0":50,"index":27,"Rank":51,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":65.84,"CMedQAv1":83.45,"CMedQAv2":85.44,"MMarcoReranking":28.74,"T2Reranking":65.74} -{"level_0":51,"index":23,"Rank":52,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":65.4,"CMedQAv1":80.47,"CMedQAv2":84.88,"MMarcoReranking":29.74,"T2Reranking":66.49} -{"level_0":52,"index":26,"Rank":53,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":64.9,"CMedQAv1":81.72,"CMedQAv2":84.64,"MMarcoReranking":27.1,"T2Reranking":66.16} -{"level_0":53,"index":208,"Rank":54,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.73,"CMedQAv1":83.85,"CMedQAv2":84.75,"MMarcoReranking":20.56,"T2Reranking":65.75} -{"level_0":54,"index":40,"Rank":55,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.2,"CMedQAv1":78.39,"CMedQAv2":80.84,"MMarcoReranking":27.29,"T2Reranking":66.3} -{"level_0":55,"index":41,"Rank":56,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.82,"CMedQAv1":77.42,"CMedQAv2":77.84,"MMarcoReranking":30.17,"T2Reranking":65.85} -{"level_0":56,"index":180,"Rank":57,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":61.86,"CMedQAv1":77.68,"CMedQAv2":78.66,"MMarcoReranking":24.21,"T2Reranking":66.9} -{"level_0":57,"index":274,"Rank":58,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.48,"CMedQAv1":80.4,"CMedQAv2":80.39,"MMarcoReranking":18.86,"T2Reranking":66.25} -{"level_0":58,"index":39,"Rank":59,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":61.35,"CMedQAv1":76.47,"CMedQAv2":77.44,"MMarcoReranking":24.31,"T2Reranking":67.18} -{"level_0":59,"index":226,"Rank":60,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.96,"CMedQAv1":80.66,"CMedQAv2":79.31,"MMarcoReranking":19.57,"T2Reranking":64.31} -{"level_0":60,"index":33,"Rank":61,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":60.92,"CMedQAv1":77.4,"CMedQAv2":79.86,"MMarcoReranking":20.5,"T2Reranking":65.9} -{"level_0":61,"index":221,"Rank":62,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":59.66,"CMedQAv1":77.76,"CMedQAv2":78.27,"MMarcoReranking":16.46,"T2Reranking":66.13} -{"level_0":62,"index":220,"Rank":63,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":59.34,"CMedQAv1":77.05,"CMedQAv2":76.76,"MMarcoReranking":17.51,"T2Reranking":66.03} -{"level_0":63,"index":191,"Rank":64,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":59.11,"CMedQAv1":70.54,"CMedQAv2":71.35,"MMarcoReranking":26.69,"T2Reranking":67.88} -{"level_0":64,"index":189,"Rank":65,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.68,"CMedQAv1":68.91,"CMedQAv2":69.42,"MMarcoReranking":25.9,"T2Reranking":66.49} -{"level_0":65,"index":188,"Rank":66,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.27,"CMedQAv1":66.06,"CMedQAv2":66.96,"MMarcoReranking":26.14,"T2Reranking":65.94} -{"level_0":66,"index":184,"Rank":67,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":56.0,"CMedQAv1":68.25,"CMedQAv2":68.56,"MMarcoReranking":21.34,"T2Reranking":65.83} -{"level_0":67,"index":183,"Rank":68,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.35,"CMedQAv1":65.21,"CMedQAv2":66.06,"MMarcoReranking":21.76,"T2Reranking":64.39} -{"level_0":68,"index":311,"Rank":69,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.28,"CMedQAv1":63.08,"CMedQAv2":64.02,"MMarcoReranking":23.39,"T2Reranking":66.65} -{"level_0":69,"index":186,"Rank":70,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.86,"CMedQAv1":63.44,"CMedQAv2":62.41,"MMarcoReranking":24.33,"T2Reranking":65.24} -{"level_0":70,"index":190,"Rank":71,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":53.62,"CMedQAv1":63.41,"CMedQAv2":63.66,"MMarcoReranking":23.69,"T2Reranking":63.74} -{"level_0":71,"index":277,"Rank":72,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":49.45,"CMedQAv1":59.26,"CMedQAv2":59.82,"MMarcoReranking":12.76,"T2Reranking":65.95} -{"level_0":72,"index":279,"Rank":73,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":49.25,"CMedQAv1":57.82,"CMedQAv2":58.88,"MMarcoReranking":14.55,"T2Reranking":65.76} -{"level_0":73,"index":53,"Rank":74,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":49.16,"CMedQAv1":58.92,"CMedQAv2":60.41,"MMarcoReranking":12.48,"T2Reranking":64.82} -{"level_0":74,"index":48,"Rank":75,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.08,"CMedQAv1":53.07,"CMedQAv2":52.84,"MMarcoReranking":9.29,"T2Reranking":65.14} -{"level_0":75,"index":49,"Rank":76,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.91,"CMedQAv1":50.19,"CMedQAv2":50.39,"MMarcoReranking":14.57,"T2Reranking":64.49} -{"level_0":76,"index":159,"Rank":77,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.84,"CMedQAv1":34.31,"CMedQAv2":35.88,"MMarcoReranking":5.83,"T2Reranking":55.35} -{"level_0":77,"index":289,"Rank":78,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.86,"CMedQAv1":19.72,"CMedQAv2":22.48,"MMarcoReranking":1.17,"T2Reranking":60.05} -{"level_0":78,"index":29,"Rank":99,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","CMedQAv1":"","CMedQAv2":"","MMarcoReranking":35.43,"T2Reranking":67.48} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":53.24,"CMedQAv1":68.25,"CMedQAv2":68.56,"MMarcoReranking":21.34,"MMarcoReranking (cmn-Hans)":29.12,"T2Reranking":65.83,"T2Reranking (cmn-Hans)":66.32} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":52.13,"CMedQAv1":65.21,"CMedQAv2":66.06,"MMarcoReranking":21.76,"MMarcoReranking (cmn-Hans)":30.52,"T2Reranking":64.39,"T2Reranking (cmn-Hans)":64.86} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.85,"CMedQAv1":63.44,"CMedQAv2":62.41,"MMarcoReranking":24.33,"MMarcoReranking (cmn-Hans)":29.98,"T2Reranking":65.24,"T2Reranking (cmn-Hans)":65.72} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, 
fp32)":4.54,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":88.99,"CMedQAv2":89.6,"MMarcoReranking":28.12,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.38,"T2Reranking (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"CMedQAv1":86.37,"CMedQAv2":87.41,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":68.11,"T2Reranking (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":80.47,"CMedQAv2":84.88,"MMarcoReranking":29.74,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.49,"T2Reranking (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CMedQAv1":81.72,"CMedQAv2":84.64,"MMarcoReranking":27.1,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.16,"T2Reranking (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CMedQAv1":83.45,"CMedQAv2":85.44,"MMarcoReranking":28.74,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.74,"T2Reranking (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":null,"CMedQAv1":77.4,"CMedQAv2":79.86,"MMarcoReranking":20.5,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.9,"T2Reranking (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, 
fp32)":0.67,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CMedQAv1":58.92,"CMedQAv2":60.41,"MMarcoReranking":12.48,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":64.82,"T2Reranking (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":21.7,"T2Reranking":null,"T2Reranking (cmn-Hans)":65.63} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} 
+{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} 
+{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":77.05,"CMedQAv2":76.76,"MMarcoReranking":17.51,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.03,"T2Reranking (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size 
(Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":77.76,"CMedQAv2":78.27,"MMarcoReranking":16.46,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.13,"T2Reranking (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":14.83,"T2Reranking":null,"T2Reranking (cmn-Hans)":63.29} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":5.27,"T2Reranking":null,"T2Reranking (cmn-Hans)":60.32} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":4.74,"T2Reranking":null,"T2Reranking (cmn-Hans)":56.26} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":4.65,"T2Reranking":null,"T2Reranking (cmn-Hans)":58.3} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million 
Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":16.14,"T2Reranking":null,"T2Reranking (cmn-Hans)":65.28} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":14.57,"T2Reranking":null,"T2Reranking (cmn-Hans)":64.49} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage 
(GB, fp32)":0.63,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CMedQAv1":59.26,"CMedQAv2":59.82,"MMarcoReranking":12.76,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.95,"T2Reranking (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"CMedQAv1":57.82,"CMedQAv2":58.88,"MMarcoReranking":14.55,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":65.76,"T2Reranking (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory 
Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":63.08,"CMedQAv2":64.02,"MMarcoReranking":23.39,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":66.65,"T2Reranking (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CMedQAv1":88.06,"CMedQAv2":88.46,"MMarcoReranking":34.3,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":67.85,"T2Reranking (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CMedQAv1":null,"CMedQAv2":null,"MMarcoReranking":null,"MMarcoReranking (cmn-Hans)":null,"T2Reranking":null,"T2Reranking (cmn-Hans)":null} diff --git a/boards_data/zh/data_tasks/Retrieval/default.jsonl b/boards_data/zh/data_tasks/Retrieval/default.jsonl index 65cd7704f0f5fc002611b5deb0109c125db9e468..2b0949510c80f07225fb5e0ef24d0335df71c801 100644 --- a/boards_data/zh/data_tasks/Retrieval/default.jsonl +++ b/boards_data/zh/data_tasks/Retrieval/default.jsonl @@ -1,72 +1,112 @@ -{"level_0":0,"index":72,"Rank":1,"Model":"Zhihui_LLM_Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.74,"CmedqaRetrieval":48.69,"CovidRetrieval":84.39,"DuRetrieval":91.34,"EcomRetrieval":71.96,"MedicalRetrieval":65.19,"MMarcoRetrieval":84.77,"T2Retrieval":88.3,"VideoRetrieval":79.31} -{"level_0":1,"index":207,"Rank":2,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.5,"CmedqaRetrieval":47.14,"CovidRetrieval":89.4,"DuRetrieval":89.44,"EcomRetrieval":70.5,"MedicalRetrieval":68.19,"MMarcoRetrieval":82.19,"T2Retrieval":85.01,"VideoRetrieval":80.09} -{"level_0":2,"index":169,"Rank":3,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":76.36,"CmedqaRetrieval":47.16,"CovidRetrieval":89.14,"DuRetrieval":89.23,"EcomRetrieval":70.74,"MedicalRetrieval":68.14,"MMarcoRetrieval":82.38,"T2Retrieval":83.81,"VideoRetrieval":80.26} -{"level_0":3,"index":234,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.03,"CmedqaRetrieval":48.69,"CovidRetrieval":83.65,"DuRetrieval":87.44,"EcomRetrieval":71.15,"MedicalRetrieval":65.59,"MMarcoRetrieval":85.16,"T2Retrieval":87.73,"VideoRetrieval":78.84} -{"level_0":4,"index":142,"Rank":5,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":76.03,"CmedqaRetrieval":48.69,"CovidRetrieval":83.65,"DuRetrieval":87.44,"EcomRetrieval":71.15,"MedicalRetrieval":65.59,"MMarcoRetrieval":85.16,"T2Retrieval":87.73,"VideoRetrieval":78.84} -{"level_0":5,"index":17,"Rank":6,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":76.03,"CmedqaRetrieval":48.69,"CovidRetrieval":83.65,"DuRetrieval":87.44,"EcomRetrieval":71.15,"MedicalRetrieval":65.59,"MMarcoRetrieval":85.16,"T2Retrieval":87.73,"VideoRetrieval":78.84} -{"level_0":6,"index":249,"Rank":7,"Model":"360Zhinao-search<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":75.06,"CmedqaRetrieval":46.73,"CovidRetrieval":85.02,"DuRetrieval":87.57,"EcomRetrieval":68.9,"MedicalRetrieval":63.69,"MMarcoRetrieval":83.32,"T2Retrieval":87.12,"VideoRetrieval":78.09} -{"level_0":7,"index":276,"Rank":8,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.97,"CmedqaRetrieval":47.26,"CovidRetrieval":83.66,"DuRetrieval":89.28,"EcomRetrieval":69.28,"MedicalRetrieval":65.94,"MMarcoRetrieval":80.65,"T2Retrieval":86.88,"VideoRetrieval":76.79} -{"level_0":8,"index":12,"Rank":9,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.41,"CmedqaRetrieval":46.56,"CovidRetrieval":84.03,"DuRetrieval":87.85,"EcomRetrieval":68.79,"MedicalRetrieval":65.92,"MMarcoRetrieval":79.93,"T2Retrieval":86.76,"VideoRetrieval":75.43} -{"level_0":9,"index":38,"Rank":10,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.41,"CmedqaRetrieval":47.64,"CovidRetrieval":85.95,"DuRetrieval":89.11,"EcomRetrieval":67.92,"MedicalRetrieval":65.2,"MMarcoRetrieval":79.81,"T2Retrieval":86.24,"VideoRetrieval":73.39} -{"level_0":10,"index":253,"Rank":11,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":74.36,"CmedqaRetrieval":47.58,"CovidRetrieval":86.78,"DuRetrieval":89.14,"EcomRetrieval":67.75,"MedicalRetrieval":64.88,"MMarcoRetrieval":79.54,"T2Retrieval":86.14,"VideoRetrieval":73.1} -{"level_0":11,"index":120,"Rank":12,"Model":"PEG<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.78,"CmedqaRetrieval":44.42,"CovidRetrieval":82.56,"DuRetrieval":87.67,"EcomRetrieval":67.32,"MedicalRetrieval":60.99,"MMarcoRetrieval":82.63,"T2Retrieval":87.0,"VideoRetrieval":77.64} -{"level_0":12,"index":29,"Rank":13,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.73,"CmedqaRetrieval":42.21,"CovidRetrieval":77.46,"DuRetrieval":90.46,"EcomRetrieval":69.3,"MedicalRetrieval":62.02,"MMarcoRetrieval":84.7,"T2Retrieval":86.26,"VideoRetrieval":77.4} 
-{"level_0":13,"index":154,"Rank":14,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.6,"CmedqaRetrieval":46.87,"CovidRetrieval":82.44,"DuRetrieval":87.13,"EcomRetrieval":68.62,"MedicalRetrieval":65.18,"MMarcoRetrieval":79.14,"T2Retrieval":85.56,"VideoRetrieval":73.89} -{"level_0":14,"index":50,"Rank":15,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.56,"CmedqaRetrieval":46.68,"CovidRetrieval":85.24,"DuRetrieval":88.26,"EcomRetrieval":67.23,"MedicalRetrieval":64.29,"MMarcoRetrieval":78.64,"T2Retrieval":85.29,"VideoRetrieval":72.89} -{"level_0":15,"index":155,"Rank":16,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.52,"CmedqaRetrieval":46.61,"CovidRetrieval":81.87,"DuRetrieval":86.8,"EcomRetrieval":68.1,"MedicalRetrieval":64.99,"MMarcoRetrieval":79.21,"T2Retrieval":85.85,"VideoRetrieval":74.71} -{"level_0":16,"index":116,"Rank":17,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.52,"CmedqaRetrieval":46.61,"CovidRetrieval":81.87,"DuRetrieval":86.8,"EcomRetrieval":68.1,"MedicalRetrieval":64.99,"MMarcoRetrieval":79.21,"T2Retrieval":85.85,"VideoRetrieval":74.71} -{"level_0":17,"index":206,"Rank":18,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.41,"CmedqaRetrieval":44.22,"CovidRetrieval":87.69,"DuRetrieval":86.45,"EcomRetrieval":68.04,"MedicalRetrieval":63.38,"MMarcoRetrieval":78.47,"T2Retrieval":85.45,"VideoRetrieval":73.59} -{"level_0":18,"index":105,"Rank":19,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.3,"CmedqaRetrieval":43.93,"CovidRetrieval":88.14,"DuRetrieval":86.23,"EcomRetrieval":67.56,"MedicalRetrieval":63.57,"MMarcoRetrieval":78.25,"T2Retrieval":84.58,"VideoRetrieval":74.16} -{"level_0":19,"index":315,"Rank":20,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":73.12,"CmedqaRetrieval":47.64,"CovidRetrieval":86.86,"DuRetrieval":88.43,"EcomRetrieval":66.39,"MedicalRetrieval":61.1,"MMarcoRetrieval":80.17,"T2Retrieval":80.11,"VideoRetrieval":74.28} -{"level_0":20,"index":129,"Rank":21,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.93,"CmedqaRetrieval":46.78,"CovidRetrieval":81.56,"DuRetrieval":86.55,"EcomRetrieval":67.6,"MedicalRetrieval":64.1,"MMarcoRetrieval":78.42,"T2Retrieval":85.08,"VideoRetrieval":73.32} -{"level_0":21,"index":298,"Rank":22,"Model":"hktv-fine-tuned-cloudy-large-zh-metaphor14<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.92,"CmedqaRetrieval":43.58,"CovidRetrieval":85.83,"DuRetrieval":86.2,"EcomRetrieval":66.99,"MedicalRetrieval":64.23,"MMarcoRetrieval":77.69,"T2Retrieval":84.94,"VideoRetrieval":73.86} -{"level_0":22,"index":125,"Rank":23,"Model":"cloudy-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.92,"CmedqaRetrieval":43.58,"CovidRetrieval":85.83,"DuRetrieval":86.2,"EcomRetrieval":66.99,"MedicalRetrieval":64.23,"MMarcoRetrieval":77.69,"T2Retrieval":84.94,"VideoRetrieval":73.86} -{"level_0":23,"index":286,"Rank":24,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":72.49,"CmedqaRetrieval":43.15,"CovidRetrieval":88.41,"DuRetrieval":85.04,"EcomRetrieval":67.25,"MedicalRetrieval":62.88,"MMarcoRetrieval":77.53,"T2Retrieval":81.93,"VideoRetrieval":73.7} -{"level_0":24,"index":173,"Rank":25,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":72.28,"CmedqaRetrieval":45.38,"CovidRetrieval":80.61,"DuRetrieval":85.55,"EcomRetrieval":67.88,"MedicalRetrieval":64.34,"MMarcoRetrieval":77.42,"T2Retrieval":84.5,"VideoRetrieval":72.56} -{"level_0":25,"index":233,"Rank":26,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.86,"CmedqaRetrieval":46.97,"CovidRetrieval":80.79,"DuRetrieval":89.4,"EcomRetrieval":62.51,"MedicalRetrieval":58.65,"MMarcoRetrieval":83.01,"T2Retrieval":85.47,"VideoRetrieval":68.11} -{"level_0":26,"index":16,"Rank":27,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.86,"CmedqaRetrieval":46.97,"CovidRetrieval":80.79,"DuRetrieval":89.4,"EcomRetrieval":62.51,"MedicalRetrieval":58.65,"MMarcoRetrieval":83.01,"T2Retrieval":85.47,"VideoRetrieval":68.11} -{"level_0":27,"index":21,"Rank":28,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.85,"CmedqaRetrieval":42.73,"CovidRetrieval":82.19,"DuRetrieval":88.02,"EcomRetrieval":64.71,"MedicalRetrieval":60.63,"MMarcoRetrieval":79.56,"T2Retrieval":84.69,"VideoRetrieval":72.25} -{"level_0":28,"index":284,"Rank":29,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.71,"CmedqaRetrieval":44.38,"CovidRetrieval":75.74,"DuRetrieval":83.64,"EcomRetrieval":69.56,"MedicalRetrieval":64.94,"MMarcoRetrieval":77.7,"T2Retrieval":83.08,"VideoRetrieval":74.67} -{"level_0":29,"index":251,"Rank":30,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.2,"CmedqaRetrieval":42.02,"CovidRetrieval":84.5,"DuRetrieval":88.25,"EcomRetrieval":62.88,"MedicalRetrieval":58.09,"MMarcoRetrieval":77.92,"T2Retrieval":82.38,"VideoRetrieval":73.54} -{"level_0":30,"index":171,"Rank":31,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.07,"CmedqaRetrieval":41.86,"CovidRetrieval":84.71,"DuRetrieval":87.8,"EcomRetrieval":62.46,"MedicalRetrieval":57.97,"MMarcoRetrieval":78.22,"T2Retrieval":82.51,"VideoRetrieval":73.04} -{"level_0":31,"index":174,"Rank":32,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.02,"CmedqaRetrieval":42.28,"CovidRetrieval":85.19,"DuRetrieval":87.81,"EcomRetrieval":61.66,"MedicalRetrieval":59.38,"MMarcoRetrieval":78.1,"T2Retrieval":82.76,"VideoRetrieval":70.96} -{"level_0":32,"index":252,"Rank":33,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.93,"CmedqaRetrieval":41.98,"CovidRetrieval":85.04,"DuRetrieval":87.97,"EcomRetrieval":61.91,"MedicalRetrieval":59.04,"MMarcoRetrieval":77.83,"T2Retrieval":82.47,"VideoRetrieval":71.18} -{"level_0":33,"index":245,"Rank":34,"Model":"checkpoint-9000<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.63,"CmedqaRetrieval":40.57,"CovidRetrieval":75.93,"DuRetrieval":83.73,"EcomRetrieval":66.14,"MedicalRetrieval":58.58,"MMarcoRetrieval":81.95,"T2Retrieval":84.17,"VideoRetrieval":73.94} 
-{"level_0":34,"index":96,"Rank":35,"Model":"qwen-1.8b-retrieval-test<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.63,"CmedqaRetrieval":40.57,"CovidRetrieval":75.93,"DuRetrieval":83.73,"EcomRetrieval":66.14,"MedicalRetrieval":58.58,"MMarcoRetrieval":81.95,"T2Retrieval":84.17,"VideoRetrieval":73.94} -{"level_0":35,"index":15,"Rank":36,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":70.62,"CmedqaRetrieval":43.47,"CovidRetrieval":80.87,"DuRetrieval":86.01,"EcomRetrieval":66.46,"MedicalRetrieval":61.33,"MMarcoRetrieval":73.83,"T2Retrieval":83.58,"VideoRetrieval":69.41} -{"level_0":36,"index":26,"Rank":37,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.54,"CmedqaRetrieval":41.03,"CovidRetrieval":75.07,"DuRetrieval":84.68,"EcomRetrieval":65.6,"MedicalRetrieval":58.28,"MMarcoRetrieval":81.38,"T2Retrieval":84.39,"VideoRetrieval":73.93} -{"level_0":37,"index":27,"Rank":38,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":70.46,"CmedqaRetrieval":42.57,"CovidRetrieval":73.35,"DuRetrieval":86.32,"EcomRetrieval":65.33,"MedicalRetrieval":59.59,"MMarcoRetrieval":79.23,"T2Retrieval":83.99,"VideoRetrieval":73.32} -{"level_0":38,"index":46,"Rank":39,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.41,"CmedqaRetrieval":45.83,"CovidRetrieval":76.77,"DuRetrieval":81.47,"EcomRetrieval":68.42,"MedicalRetrieval":64.42,"MMarcoRetrieval":71.7,"T2Retrieval":81.33,"VideoRetrieval":73.31} -{"level_0":39,"index":20,"Rank":40,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.26,"CmedqaRetrieval":42.68,"CovidRetrieval":80.79,"DuRetrieval":85.74,"EcomRetrieval":62.06,"MedicalRetrieval":59.53,"MMarcoRetrieval":78.66,"T2Retrieval":84.69,"VideoRetrieval":67.96} -{"level_0":40,"index":175,"Rank":41,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.14,"CmedqaRetrieval":42.63,"CovidRetrieval":80.81,"DuRetrieval":85.75,"EcomRetrieval":62.07,"MedicalRetrieval":59.42,"MMarcoRetrieval":78.67,"T2Retrieval":83.87,"VideoRetrieval":67.93} -{"level_0":41,"index":172,"Rank":42,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.08,"CmedqaRetrieval":42.03,"CovidRetrieval":79.86,"DuRetrieval":86.97,"EcomRetrieval":62.85,"MedicalRetrieval":58.5,"MMarcoRetrieval":78.36,"T2Retrieval":83.76,"VideoRetrieval":68.27} -{"level_0":42,"index":51,"Rank":43,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.59,"CmedqaRetrieval":41.8,"CovidRetrieval":79.3,"DuRetrieval":86.43,"EcomRetrieval":62.64,"MedicalRetrieval":58.27,"MMarcoRetrieval":78.17,"T2Retrieval":82.11,"VideoRetrieval":67.96} -{"level_0":43,"index":23,"Rank":44,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":69.49,"CmedqaRetrieval":41.61,"CovidRetrieval":74.7,"DuRetrieval":85.07,"EcomRetrieval":64.25,"MedicalRetrieval":56.51,"MMarcoRetrieval":77.69,"T2Retrieval":83.71,"VideoRetrieval":72.35} -{"level_0":44,"index":199,"Rank":45,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":69.4,"CmedqaRetrieval":39.15,"CovidRetrieval":81.22,"DuRetrieval":84.57,"EcomRetrieval":63.95,"MedicalRetrieval":57.12,"MMarcoRetrieval":77.96,"T2Retrieval":80.59,"VideoRetrieval":70.62} -{"level_0":45,"index":280,"Rank":46,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":69.4,"CmedqaRetrieval":39.15,"CovidRetrieval":81.22,"DuRetrieval":84.57,"EcomRetrieval":63.95,"MedicalRetrieval":57.12,"MMarcoRetrieval":77.96,"T2Retrieval":80.59,"VideoRetrieval":70.62} -{"level_0":46,"index":47,"Rank":47,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.7,"CmedqaRetrieval":42.69,"CovidRetrieval":75.63,"DuRetrieval":79.89,"EcomRetrieval":64.48,"MedicalRetrieval":59.95,"MMarcoRetrieval":70.75,"T2Retrieval":79.9,"VideoRetrieval":68.3} -{"level_0":47,"index":288,"Rank":48,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.5,"CmedqaRetrieval":42.46,"CovidRetrieval":61.65,"DuRetrieval":78.57,"EcomRetrieval":57.91,"MedicalRetrieval":62.64,"MMarcoRetrieval":75.0,"T2Retrieval":78.12,"VideoRetrieval":67.69} -{"level_0":48,"index":40,"Rank":49,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.11,"CmedqaRetrieval":34.08,"CovidRetrieval":71.92,"DuRetrieval":79.7,"EcomRetrieval":59.22,"MedicalRetrieval":51.5,"MMarcoRetrieval":74.73,"T2Retrieval":77.08,"VideoRetrieval":64.67} -{"level_0":49,"index":184,"Rank":50,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.66,"CmedqaRetrieval":28.67,"CovidRetrieval":75.51,"DuRetrieval":85.32,"EcomRetrieval":54.75,"MedicalRetrieval":51.44,"MMarcoRetrieval":79.2,"T2Retrieval":76.11,"VideoRetrieval":58.25} -{"level_0":50,"index":41,"Rank":51,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.52,"CmedqaRetrieval":33.45,"CovidRetrieval":73.36,"DuRetrieval":80.13,"EcomRetrieval":55.89,"MedicalRetrieval":50.67,"MMarcoRetrieval":74.14,"T2Retrieval":76.84,"VideoRetrieval":63.66} -{"level_0":51,"index":191,"Rank":52,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.87,"CmedqaRetrieval":27.0,"CovidRetrieval":68.86,"DuRetrieval":78.08,"EcomRetrieval":58.34,"MedicalRetrieval":49.63,"MMarcoRetrieval":78.82,"T2Retrieval":79.82,"VideoRetrieval":62.44} -{"level_0":52,"index":33,"Rank":53,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":61.77,"CmedqaRetrieval":35.11,"CovidRetrieval":70.14,"DuRetrieval":77.28,"EcomRetrieval":55.71,"MedicalRetrieval":49.8,"MMarcoRetrieval":63.48,"T2Retrieval":76.43,"VideoRetrieval":66.19} -{"level_0":53,"index":180,"Rank":54,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":61.75,"CmedqaRetrieval":34.22,"CovidRetrieval":73.13,"DuRetrieval":87.02,"EcomRetrieval":45.96,"MedicalRetrieval":52.75,"MMarcoRetrieval":74.83,"T2Retrieval":80.68,"VideoRetrieval":45.39} -{"level_0":54,"index":183,"Rank":55,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, 
fp32)":1.04,"Average":61.63,"CmedqaRetrieval":27.2,"CovidRetrieval":73.45,"DuRetrieval":81.64,"EcomRetrieval":54.17,"MedicalRetrieval":48.35,"MMarcoRetrieval":76.04,"T2Retrieval":70.86,"VideoRetrieval":61.3} -{"level_0":55,"index":39,"Rank":56,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.6,"CmedqaRetrieval":31.79,"CovidRetrieval":66.33,"DuRetrieval":75.25,"EcomRetrieval":57.32,"MedicalRetrieval":49.28,"MMarcoRetrieval":69.11,"T2Retrieval":75.94,"VideoRetrieval":59.76} -{"level_0":56,"index":186,"Rank":57,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.95,"CmedqaRetrieval":24.38,"CovidRetrieval":72.82,"DuRetrieval":81.35,"EcomRetrieval":53.56,"MedicalRetrieval":44.84,"MMarcoRetrieval":73.17,"T2Retrieval":71.39,"VideoRetrieval":58.09} -{"level_0":57,"index":189,"Rank":58,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.77,"CmedqaRetrieval":26.18,"CovidRetrieval":67.57,"DuRetrieval":75.82,"EcomRetrieval":51.28,"MedicalRetrieval":47.48,"MMarcoRetrieval":77.0,"T2Retrieval":76.3,"VideoRetrieval":56.54} -{"level_0":58,"index":226,"Rank":59,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.76,"CmedqaRetrieval":32.33,"CovidRetrieval":67.22,"DuRetrieval":75.27,"EcomRetrieval":58.24,"MedicalRetrieval":54.78,"MMarcoRetrieval":57.91,"T2Retrieval":69.22,"VideoRetrieval":63.09} -{"level_0":59,"index":274,"Rank":60,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.2,"CmedqaRetrieval":30.34,"CovidRetrieval":58.55,"DuRetrieval":75.05,"EcomRetrieval":52.54,"MedicalRetrieval":50.08,"MMarcoRetrieval":62.26,"T2Retrieval":72.7,"VideoRetrieval":64.08} -{"level_0":60,"index":188,"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.96,"CmedqaRetrieval":23.91,"CovidRetrieval":68.14,"DuRetrieval":72.84,"EcomRetrieval":52.02,"MedicalRetrieval":43.27,"MMarcoRetrieval":74.66,"T2Retrieval":72.81,"VideoRetrieval":56.07} -{"level_0":61,"index":220,"Rank":62,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":56.91,"CmedqaRetrieval":30.33,"CovidRetrieval":66.42,"DuRetrieval":75.76,"EcomRetrieval":50.27,"MedicalRetrieval":42.79,"MMarcoRetrieval":65.46,"T2Retrieval":73.14,"VideoRetrieval":51.11} -{"level_0":62,"index":221,"Rank":63,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":54.75,"CmedqaRetrieval":30.73,"CovidRetrieval":61.33,"DuRetrieval":74.69,"EcomRetrieval":45.18,"MedicalRetrieval":48.66,"MMarcoRetrieval":61.06,"T2Retrieval":72.36,"VideoRetrieval":44.02} -{"level_0":63,"index":311,"Rank":64,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.0,"CmedqaRetrieval":22.36,"CovidRetrieval":57.21,"DuRetrieval":71.17,"EcomRetrieval":44.49,"MedicalRetrieval":37.92,"MMarcoRetrieval":69.86,"T2Retrieval":69.14,"VideoRetrieval":43.85} -{"level_0":64,"index":190,"Rank":65,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.16,"CmedqaRetrieval":20.03,"CovidRetrieval":33.57,"DuRetrieval":56.04,"EcomRetrieval":47.0,"MedicalRetrieval":36.68,"MMarcoRetrieval":69.31,"T2Retrieval":51.57,"VideoRetrieval":55.07} 
-{"level_0":65,"index":279,"Rank":66,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":44.4,"CmedqaRetrieval":18.04,"CovidRetrieval":55.48,"DuRetrieval":59.36,"EcomRetrieval":40.48,"MedicalRetrieval":29.8,"MMarcoRetrieval":55.31,"T2Retrieval":58.67,"VideoRetrieval":38.04} -{"level_0":66,"index":53,"Rank":67,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":41.94,"CmedqaRetrieval":15.53,"CovidRetrieval":60.48,"DuRetrieval":51.87,"EcomRetrieval":37.58,"MedicalRetrieval":30.93,"MMarcoRetrieval":45.96,"T2Retrieval":50.52,"VideoRetrieval":42.65} -{"level_0":67,"index":277,"Rank":68,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":38.79,"CmedqaRetrieval":15.91,"CovidRetrieval":44.81,"DuRetrieval":52.23,"EcomRetrieval":34.6,"MedicalRetrieval":27.56,"MMarcoRetrieval":44.06,"T2Retrieval":51.67,"VideoRetrieval":39.52} -{"level_0":68,"index":48,"Rank":69,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":32.1,"CmedqaRetrieval":11.35,"CovidRetrieval":39.63,"DuRetrieval":41.16,"EcomRetrieval":32.71,"MedicalRetrieval":20.35,"MMarcoRetrieval":34.62,"T2Retrieval":41.8,"VideoRetrieval":35.15} -{"level_0":69,"index":49,"Rank":70,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":22.92,"CmedqaRetrieval":10.15,"CovidRetrieval":28.85,"DuRetrieval":33.41,"EcomRetrieval":9.69,"MedicalRetrieval":14.1,"MMarcoRetrieval":44.62,"T2Retrieval":28.35,"VideoRetrieval":14.17} -{"level_0":70,"index":159,"Rank":71,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":7.15,"CmedqaRetrieval":7.22,"CovidRetrieval":10.38,"DuRetrieval":1.44,"EcomRetrieval":4.68,"MedicalRetrieval":4.08,"MMarcoRetrieval":11.0,"T2Retrieval":1.31,"VideoRetrieval":17.05} -{"level_0":71,"index":289,"Rank":72,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":4.52,"CmedqaRetrieval":2.22,"CovidRetrieval":2.32,"DuRetrieval":5.86,"EcomRetrieval":10.37,"MedicalRetrieval":2.37,"MMarcoRetrieval":3.64,"T2Retrieval":4.64,"VideoRetrieval":4.77} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.66,"CmedqaRetrieval":28.67,"CmedqaRetrieval (cmn-Hans)":28.66,"CovidRetrieval":75.51,"CovidRetrieval (cmn-Hans)":75.61,"DuRetrieval":85.32,"DuRetrieval (cmn-Hans)":85.3,"EcomRetrieval":54.75,"EcomRetrieval (cmn-Hans)":54.67,"MedicalRetrieval":51.44,"MedicalRetrieval (cmn-Hans)":51.44,"MMarcoRetrieval":79.2,"MMarcoRetrieval (cmn-Hans)":79.2,"T2Retrieval":76.11,"T2Retrieval (cmn-Hans)":76.07,"VideoRetrieval":58.25,"VideoRetrieval (cmn-Hans)":58.28} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.61,"CmedqaRetrieval":27.2,"CmedqaRetrieval (cmn-Hans)":27.2,"CovidRetrieval":73.45,"CovidRetrieval (cmn-Hans)":73.48,"DuRetrieval":81.64,"DuRetrieval (cmn-Hans)":81.66,"EcomRetrieval":54.17,"EcomRetrieval (cmn-Hans)":54.01,"MedicalRetrieval":48.35,"MedicalRetrieval (cmn-Hans)":48.33,"MMarcoRetrieval":76.04,"MMarcoRetrieval (cmn-Hans)":76.01,"T2Retrieval":70.86,"T2Retrieval (cmn-Hans)":70.77,"VideoRetrieval":61.3,"VideoRetrieval (cmn-Hans)":61.26} 
+{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":59.94,"CmedqaRetrieval":24.38,"CmedqaRetrieval (cmn-Hans)":24.36,"CovidRetrieval":72.82,"CovidRetrieval (cmn-Hans)":72.82,"DuRetrieval":81.35,"DuRetrieval (cmn-Hans)":81.36,"EcomRetrieval":53.56,"EcomRetrieval (cmn-Hans)":53.53,"MedicalRetrieval":44.84,"MedicalRetrieval (cmn-Hans)":44.84,"MMarcoRetrieval":73.17,"MMarcoRetrieval (cmn-Hans)":73.17,"T2Retrieval":71.39,"T2Retrieval (cmn-Hans)":71.36,"VideoRetrieval":58.09,"VideoRetrieval (cmn-Hans)":58.06} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval 
(cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":46.56,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":84.03,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":87.85,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":68.79,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":65.92,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":79.93,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":86.76,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":75.43,"VideoRetrieval (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"CmedqaRetrieval":43.47,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":80.87,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":86.01,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":66.46,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":61.33,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":73.83,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":83.58,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":69.41,"VideoRetrieval (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":41.61,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":74.7,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":85.07,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":64.25,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":56.51,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":77.69,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":83.71,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":72.35,"VideoRetrieval (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CmedqaRetrieval":41.03,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":75.07,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":84.68,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":65.6,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":58.28,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":81.38,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":84.39,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":73.93,"VideoRetrieval (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CmedqaRetrieval":42.57,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":73.35,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":86.32,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":65.33,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":59.59,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":79.23,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":83.99,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":73.32,"VideoRetrieval (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval 
(cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":null,"CmedqaRetrieval":35.11,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":70.14,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":77.28,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":55.71,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":49.8,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":63.48,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":76.43,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":66.19,"VideoRetrieval (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval 
(cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"CmedqaRetrieval":15.53,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":60.48,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":51.87,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":37.58,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":30.93,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":45.96,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":50.52,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":42.65,"VideoRetrieval (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":35.58,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":73.47,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":88.18,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":54.33,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":55.81,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":76.54,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":82.96,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":53.85} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} 
+{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million 
Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval 
(cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval 
(cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval 
(cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":30.33,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":66.42,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":75.76,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":50.27,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":42.79,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":65.46,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":73.14,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":51.11,"VideoRetrieval (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, 
fp32)":0.38,"Average":null,"CmedqaRetrieval":30.73,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":61.33,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":74.69,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":45.18,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":48.66,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":61.06,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":72.36,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":44.02,"VideoRetrieval (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval 
(cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":5.49,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":28.6,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":26.34,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":25.42,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":6.68,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":34.78,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":25.32,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":22.04} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":2.58,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":10.79,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":6.62,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":4.01,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":2.3,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":7.46,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":4.82,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":9.38} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":2.03,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":0.8,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":3.03,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":3.7,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":1.76,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":6.21,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":1.6,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":9.79} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":2.0,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":3.7,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":4.92,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":3.94,"MedicalRetrieval":null,"MedicalRetrieval 
(cmn-Hans)":1.71,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":7.13,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":2.98,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":8.48} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} 
+{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":10.78,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":30.11,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":34.72,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":13.32,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":15.46,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":46.62,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":30.31,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":14.71} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":10.15,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":28.85,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":33.41,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":9.69,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":14.1,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":44.62,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":28.35,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":14.18} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, 
fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"CmedqaRetrieval":15.91,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":44.81,"CovidRetrieval 
(cmn-Hans)":null,"DuRetrieval":52.23,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":34.6,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":27.56,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":44.06,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":51.67,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":39.52,"VideoRetrieval (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":null,"CmedqaRetrieval":18.04,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":55.48,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":59.36,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":40.48,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":29.8,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":55.31,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":58.67,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":38.04,"VideoRetrieval (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval 
(cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":22.36,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":57.21,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":71.17,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":44.49,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":37.92,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":69.86,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":69.14,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":43.85,"VideoRetrieval (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval 
(cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"CmedqaRetrieval":47.64,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":86.86,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":88.43,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":66.39,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":61.1,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":80.17,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":80.11,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":74.28,"VideoRetrieval (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"CmedqaRetrieval":null,"CmedqaRetrieval (cmn-Hans)":null,"CovidRetrieval":null,"CovidRetrieval (cmn-Hans)":null,"DuRetrieval":null,"DuRetrieval (cmn-Hans)":null,"EcomRetrieval":null,"EcomRetrieval (cmn-Hans)":null,"MedicalRetrieval":null,"MedicalRetrieval (cmn-Hans)":null,"MMarcoRetrieval":null,"MMarcoRetrieval (cmn-Hans)":null,"T2Retrieval":null,"T2Retrieval (cmn-Hans)":null,"VideoRetrieval":null,"VideoRetrieval (cmn-Hans)":null} diff --git a/boards_data/zh/data_tasks/STS/default.jsonl b/boards_data/zh/data_tasks/STS/default.jsonl index 5865b3628b6b1c8ffe98c00b108af8465256d380..6e3d57e76abad60aac5b18c8fa6b71300babda6b 100644 --- a/boards_data/zh/data_tasks/STS/default.jsonl +++ b/boards_data/zh/data_tasks/STS/default.jsonl @@ -1,87 +1,112 @@ -{"level_0":0,"index":274,"Rank":1,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.66,"AFQMC":53.84,"ATEC":54.44,"BQ":70.57,"LCQMC":74.99,"PAWSX":58.35,"QBQTC":71.45,"STS22 (zh)":74.92,"STSB":82.74} -{"level_0":1,"index":17,"Rank":2,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":65.33,"AFQMC":72.25,"ATEC":62.62,"BQ":81.25,"LCQMC":73.81,"PAWSX":54.06,"QBQTC":31.37,"STS22 (zh)":66.13,"STSB":81.17} -{"level_0":2,"index":142,"Rank":3,"Model":"gte-Qwen2-7B-instruct-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.33,"AFQMC":72.25,"ATEC":62.62,"BQ":81.25,"LCQMC":73.81,"PAWSX":54.06,"QBQTC":31.37,"STS22 (zh)":66.13,"STSB":81.17} 
-{"level_0":3,"index":234,"Rank":4,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.33,"AFQMC":72.25,"ATEC":62.62,"BQ":81.25,"LCQMC":73.81,"PAWSX":54.06,"QBQTC":31.37,"STS22 (zh)":66.13,"STSB":81.17} -{"level_0":4,"index":46,"Rank":5,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.89,"AFQMC":71.13,"ATEC":64.64,"BQ":72.3,"LCQMC":78.76,"PAWSX":45.05,"QBQTC":42.62,"STS22 (zh)":63.84,"STSB":80.77} -{"level_0":5,"index":207,"Rank":6,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.53,"AFQMC":60.96,"ATEC":58.81,"BQ":75.08,"LCQMC":79.82,"PAWSX":47.42,"QBQTC":45.14,"STS22 (zh)":66.96,"STSB":82.05} -{"level_0":6,"index":169,"Rank":7,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":64.22,"AFQMC":60.72,"ATEC":58.77,"BQ":75.04,"LCQMC":79.75,"PAWSX":45.94,"QBQTC":43.46,"STS22 (zh)":66.71,"STSB":83.34} -{"level_0":7,"index":253,"Rank":8,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.5,"AFQMC":61.49,"ATEC":59.2,"BQ":72.88,"LCQMC":79.55,"PAWSX":46.48,"QBQTC":45.91,"STS22 (zh)":63.76,"STSB":78.71} -{"level_0":8,"index":38,"Rank":9,"Model":"Yinka<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.33,"AFQMC":61.02,"ATEC":59.02,"BQ":72.76,"LCQMC":79.43,"PAWSX":45.92,"QBQTC":44.87,"STS22 (zh)":64.41,"STSB":79.24} -{"level_0":9,"index":50,"Rank":10,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.23,"AFQMC":60.89,"ATEC":58.82,"BQ":72.53,"LCQMC":79.19,"PAWSX":45.8,"QBQTC":43.94,"STS22 (zh)":65.18,"STSB":79.48} -{"level_0":10,"index":173,"Rank":11,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.49,"AFQMC":59.22,"ATEC":58.43,"BQ":70.44,"LCQMC":78.27,"PAWSX":43.97,"QBQTC":40.02,"STS22 (zh)":66.49,"STSB":83.05} -{"level_0":11,"index":116,"Rank":12,"Model":"stella-mrl-large-zh-v3.5-1792d-1024<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.48,"AFQMC":58.85,"ATEC":58.08,"BQ":71.05,"LCQMC":78.26,"PAWSX":45.36,"QBQTC":38.98,"STS22 (zh)":66.26,"STSB":82.96} -{"level_0":12,"index":155,"Rank":13,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.48,"AFQMC":58.85,"ATEC":58.08,"BQ":71.05,"LCQMC":78.26,"PAWSX":45.36,"QBQTC":38.98,"STS22 (zh)":66.26,"STSB":82.96} -{"level_0":13,"index":154,"Rank":14,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.46,"AFQMC":59.11,"ATEC":58.19,"BQ":71.07,"LCQMC":78.27,"PAWSX":45.0,"QBQTC":38.69,"STS22 (zh)":66.53,"STSB":82.8} -{"level_0":14,"index":12,"Rank":15,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.46,"AFQMC":59.11,"ATEC":58.19,"BQ":71.07,"LCQMC":78.27,"PAWSX":44.98,"QBQTC":38.69,"STS22 (zh)":66.53,"STSB":82.8} -{"level_0":15,"index":15,"Rank":16,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":62.32,"AFQMC":58.47,"ATEC":55.46,"BQ":77.59,"LCQMC":76.29,"PAWSX":50.22,"QBQTC":31.82,"STS22 (zh)":67.36,"STSB":81.37} 
-{"level_0":16,"index":276,"Rank":17,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.15,"AFQMC":58.61,"ATEC":58.03,"BQ":70.98,"LCQMC":78.14,"PAWSX":45.59,"QBQTC":38.1,"STS22 (zh)":65.34,"STSB":82.42} -{"level_0":17,"index":47,"Rank":18,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.13,"AFQMC":59.54,"ATEC":58.63,"BQ":70.31,"LCQMC":78.73,"PAWSX":42.97,"QBQTC":41.03,"STS22 (zh)":66.76,"STSB":79.09} -{"level_0":18,"index":129,"Rank":19,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":62.09,"AFQMC":58.81,"ATEC":57.95,"BQ":70.37,"LCQMC":78.17,"PAWSX":45.51,"QBQTC":37.31,"STS22 (zh)":65.85,"STSB":82.73} -{"level_0":19,"index":16,"Rank":20,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.96,"AFQMC":58.42,"ATEC":55.65,"BQ":73.85,"LCQMC":75.39,"PAWSX":42.46,"QBQTC":35.15,"STS22 (zh)":67.4,"STSB":79.4} -{"level_0":20,"index":233,"Rank":21,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.96,"AFQMC":58.42,"ATEC":55.65,"BQ":73.85,"LCQMC":75.39,"PAWSX":42.46,"QBQTC":35.15,"STS22 (zh)":67.4,"STSB":79.4} -{"level_0":21,"index":315,"Rank":22,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":60.07,"AFQMC":50.8,"ATEC":53.23,"BQ":66.49,"LCQMC":76.6,"PAWSX":47.56,"QBQTC":39.96,"STS22 (zh)":65.78,"STSB":80.14} -{"level_0":22,"index":280,"Rank":23,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.39,"AFQMC":50.59,"ATEC":51.29,"BQ":66.07,"LCQMC":75.74,"PAWSX":41.49,"QBQTC":38.11,"STS22 (zh)":69.25,"STSB":82.56} -{"level_0":23,"index":199,"Rank":24,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.39,"AFQMC":50.59,"ATEC":51.29,"BQ":66.07,"LCQMC":75.74,"PAWSX":41.49,"QBQTC":38.11,"STS22 (zh)":69.25,"STSB":82.56} -{"level_0":24,"index":175,"Rank":25,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.66,"AFQMC":49.95,"ATEC":53.12,"BQ":65.54,"LCQMC":77.48,"PAWSX":36.22,"QBQTC":38.73,"STS22 (zh)":68.87,"STSB":79.37} -{"level_0":25,"index":21,"Rank":26,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.66,"AFQMC":49.94,"ATEC":53.11,"BQ":65.54,"LCQMC":77.48,"PAWSX":36.22,"QBQTC":38.74,"STS22 (zh)":68.88,"STSB":79.37} -{"level_0":26,"index":20,"Rank":27,"Model":"tao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.66,"AFQMC":49.94,"ATEC":53.11,"BQ":65.54,"LCQMC":77.48,"PAWSX":36.22,"QBQTC":38.74,"STS22 (zh)":68.88,"STSB":79.36} -{"level_0":27,"index":206,"Rank":28,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.52,"AFQMC":54.85,"ATEC":55.13,"BQ":65.57,"LCQMC":75.55,"PAWSX":37.87,"QBQTC":33.69,"STS22 (zh)":64.32,"STSB":81.19} -{"level_0":28,"index":105,"Rank":29,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.41,"AFQMC":54.73,"ATEC":55.13,"BQ":64.89,"LCQMC":75.12,"PAWSX":37.93,"QBQTC":34.21,"STS22 (zh)":64.54,"STSB":80.72} -{"level_0":29,"index":174,"Rank":30,"Model":"stella-large-zh<\/a>","Model Size (Million 
Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.3,"AFQMC":54.49,"ATEC":54.45,"BQ":62.38,"LCQMC":76.28,"PAWSX":37.97,"QBQTC":37.84,"STS22 (zh)":67.28,"STSB":75.71} -{"level_0":30,"index":252,"Rank":31,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":58.02,"AFQMC":54.17,"ATEC":54.28,"BQ":62.31,"LCQMC":75.81,"PAWSX":38.31,"QBQTC":38.22,"STS22 (zh)":66.66,"STSB":74.43} -{"level_0":31,"index":306,"Rank":32,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.91,"AFQMC":43.45,"ATEC":48.01,"BQ":65.58,"LCQMC":75.56,"PAWSX":47.62,"QBQTC":40.43,"STS22 (zh)":67.24,"STSB":75.38} -{"level_0":32,"index":286,"Rank":33,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.82,"AFQMC":54.58,"ATEC":54.6,"BQ":64.41,"LCQMC":74.4,"PAWSX":38.55,"QBQTC":33.36,"STS22 (zh)":62.98,"STSB":79.68} -{"level_0":33,"index":51,"Rank":34,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.08,"AFQMC":46.98,"ATEC":51.35,"BQ":65.08,"LCQMC":76.96,"PAWSX":30.03,"QBQTC":37.5,"STS22 (zh)":69.2,"STSB":79.57} -{"level_0":34,"index":172,"Rank":35,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.92,"AFQMC":46.73,"ATEC":51.22,"BQ":64.8,"LCQMC":76.89,"PAWSX":29.7,"QBQTC":37.38,"STS22 (zh)":69.02,"STSB":79.63} -{"level_0":35,"index":29,"Rank":36,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.87,"AFQMC":47.17,"ATEC":50.75,"BQ":62.02,"LCQMC":75.95,"PAWSX":30.57,"QBQTC":38.98,"STS22 (zh)":68.68,"STSB":80.87} -{"level_0":36,"index":171,"Rank":37,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":56.54,"AFQMC":51.75,"ATEC":52.82,"BQ":63.29,"LCQMC":75.83,"PAWSX":32.02,"QBQTC":36.47,"STS22 (zh)":67.52,"STSB":72.63} -{"level_0":37,"index":27,"Rank":38,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":56.25,"AFQMC":44.36,"ATEC":49.54,"BQ":62.94,"LCQMC":74.33,"PAWSX":33.92,"QBQTC":37.29,"STS22 (zh)":68.94,"STSB":78.7} -{"level_0":38,"index":284,"Rank":39,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.96,"AFQMC":49.07,"ATEC":50.83,"BQ":65.5,"LCQMC":74.06,"PAWSX":27.97,"QBQTC":35.19,"STS22 (zh)":63.64,"STSB":81.46} -{"level_0":39,"index":251,"Rank":40,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":55.9,"AFQMC":51.4,"ATEC":52.59,"BQ":62.68,"LCQMC":75.4,"PAWSX":31.58,"QBQTC":36.48,"STS22 (zh)":66.51,"STSB":70.6} -{"level_0":40,"index":40,"Rank":41,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.55,"AFQMC":46.7,"ATEC":49.95,"BQ":56.08,"LCQMC":74.96,"PAWSX":33.49,"QBQTC":31.06,"STS22 (zh)":64.51,"STSB":79.63} -{"level_0":41,"index":41,"Rank":42,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.35,"AFQMC":46.77,"ATEC":50.18,"BQ":55.25,"LCQMC":74.29,"PAWSX":33.24,"QBQTC":30.92,"STS22 (zh)":64.41,"STSB":79.74} -{"level_0":42,"index":23,"Rank":43,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":53.72,"AFQMC":42.4,"ATEC":48.17,"BQ":61.78,"LCQMC":74.45,"PAWSX":20.4,"QBQTC":36.22,"STS22 
(zh)":68.01,"STSB":78.31} -{"level_0":43,"index":26,"Rank":44,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":53.0,"AFQMC":43.06,"ATEC":48.29,"BQ":60.53,"LCQMC":74.71,"PAWSX":16.64,"QBQTC":35.2,"STS22 (zh)":67.19,"STSB":78.41} -{"level_0":44,"index":39,"Rank":45,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":51.45,"AFQMC":44.25,"ATEC":48.24,"BQ":56.27,"LCQMC":74.09,"PAWSX":12.18,"QBQTC":29.94,"STS22 (zh)":66.06,"STSB":80.54} -{"level_0":45,"index":226,"Rank":46,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.54,"AFQMC":37.4,"ATEC":42.16,"BQ":70.96,"LCQMC":73.27,"PAWSX":18.55,"QBQTC":29.6,"STS22 (zh)":65.44,"STSB":66.91} -{"level_0":46,"index":220,"Rank":47,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":50.47,"AFQMC":35.87,"ATEC":41.27,"BQ":63.81,"LCQMC":74.88,"PAWSX":12.19,"QBQTC":32.07,"STS22 (zh)":66.73,"STSB":76.97} -{"level_0":47,"index":221,"Rank":48,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":50.42,"AFQMC":36.53,"ATEC":41.8,"BQ":65.2,"LCQMC":74.2,"PAWSX":15.95,"QBQTC":32.65,"STS22 (zh)":62.91,"STSB":74.16} -{"level_0":48,"index":180,"Rank":49,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":50.22,"AFQMC":38.99,"ATEC":42.84,"BQ":50.64,"LCQMC":75.48,"PAWSX":16.81,"QBQTC":31.8,"STS22 (zh)":63.4,"STSB":81.81} -{"level_0":49,"index":288,"Rank":50,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":49.72,"AFQMC":36.69,"ATEC":45.77,"BQ":49.64,"LCQMC":72.72,"PAWSX":12.77,"QBQTC":36.97,"STS22 (zh)":66.72,"STSB":76.51} -{"level_0":50,"index":33,"Rank":51,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":49.1,"AFQMC":33.42,"ATEC":43.01,"BQ":55.22,"LCQMC":72.19,"PAWSX":9.26,"QBQTC":35.29,"STS22 (zh)":67.72,"STSB":76.73} -{"level_0":51,"index":184,"Rank":52,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":48.29,"AFQMC":33.02,"ATEC":39.81,"BQ":46.44,"LCQMC":75.95,"PAWSX":14.63,"QBQTC":29.77,"STS22 (zh)":65.64,"STSB":81.08} -{"level_0":52,"index":183,"Rank":53,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":46.49,"AFQMC":29.67,"ATEC":37.01,"BQ":45.45,"LCQMC":74.15,"PAWSX":12.14,"QBQTC":28.81,"STS22 (zh)":65.64,"STSB":79.05} -{"level_0":53,"index":188,"Rank":54,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":45.6,"AFQMC":27.98,"ATEC":35.86,"BQ":46.23,"LCQMC":72.8,"PAWSX":15.66,"QBQTC":32.65,"STS22 (zh)":54.08,"STSB":79.53} -{"level_0":54,"index":186,"Rank":55,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":45.27,"AFQMC":25.21,"ATEC":35.14,"BQ":43.27,"LCQMC":72.7,"PAWSX":11.01,"QBQTC":30.25,"STS22 (zh)":66.84,"STSB":77.73} -{"level_0":55,"index":53,"Rank":56,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":44.97,"AFQMC":24.51,"ATEC":32.45,"BQ":44.22,"LCQMC":69.16,"PAWSX":14.55,"QBQTC":29.51,"STS22 (zh)":65.94,"STSB":79.45} 
-{"level_0":56,"index":48,"Rank":57,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.62,"AFQMC":22.57,"ATEC":30.3,"BQ":40.98,"LCQMC":68.4,"PAWSX":15.08,"QBQTC":27.92,"STS22 (zh)":61.58,"STSB":82.17} -{"level_0":57,"index":277,"Rank":58,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":43.41,"AFQMC":26.06,"ATEC":31.93,"BQ":42.67,"LCQMC":70.16,"PAWSX":17.21,"QBQTC":24.62,"STS22 (zh)":55.35,"STSB":79.3} -{"level_0":58,"index":311,"Rank":59,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":43.35,"AFQMC":23.88,"ATEC":29.25,"BQ":45.33,"LCQMC":68.41,"PAWSX":16.55,"QBQTC":30.27,"STS22 (zh)":62.53,"STSB":70.61} -{"level_0":59,"index":279,"Rank":60,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":42.78,"AFQMC":22.24,"ATEC":30.84,"BQ":43.33,"LCQMC":66.74,"PAWSX":12.31,"QBQTC":27.2,"STS22 (zh)":66.4,"STSB":73.22} -{"level_0":60,"index":190,"Rank":61,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":42.57,"AFQMC":25.32,"ATEC":33.46,"BQ":43.93,"LCQMC":71.98,"PAWSX":14.23,"QBQTC":32.92,"STS22 (zh)":40.97,"STSB":77.77} -{"level_0":61,"index":49,"Rank":62,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":39.11,"AFQMC":15.69,"ATEC":20.27,"BQ":36.33,"LCQMC":63.3,"PAWSX":12.16,"QBQTC":22.53,"STS22 (zh)":61.75,"STSB":80.84} -{"level_0":62,"index":159,"Rank":63,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":36.48,"AFQMC":17.63,"ATEC":26.18,"BQ":37.66,"LCQMC":50.11,"PAWSX":32.75,"QBQTC":24.48,"STS22 (zh)":52.82,"STSB":50.18} -{"level_0":63,"index":289,"Rank":64,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":20.86,"AFQMC":3.76,"ATEC":10.09,"BQ":19.31,"LCQMC":42.15,"PAWSX":6.14,"QBQTC":6.12,"STS22 (zh)":33.7,"STSB":45.6} -{"level_0":64,"index":11,"Rank":76,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":49.41,"STSB":""} -{"level_0":65,"index":67,"Rank":110,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":54.18,"STSB":""} -{"level_0":66,"index":74,"Rank":117,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":60.0,"STSB":""} -{"level_0":67,"index":91,"Rank":134,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":25.49,"STSB":""} -{"level_0":68,"index":92,"Rank":135,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":6.52,"STSB":""} -{"level_0":69,"index":126,"Rank":167,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, 
fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":64.04,"STSB":""} -{"level_0":70,"index":138,"Rank":178,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":58.54,"STSB":""} -{"level_0":71,"index":139,"Rank":179,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":66.78,"STSB":""} -{"level_0":72,"index":189,"Rank":214,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":30.55,"ATEC":37.35,"BQ":47.17,"LCQMC":75.32,"PAWSX":17.46,"QBQTC":33.7,"STS22 (zh)":"","STSB":81.52} -{"level_0":73,"index":191,"Rank":215,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":31.87,"ATEC":38.8,"BQ":47.67,"LCQMC":75.34,"PAWSX":17.25,"QBQTC":34.48,"STS22 (zh)":"","STSB":82.03} -{"level_0":74,"index":254,"Rank":267,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":63.02,"STSB":""} -{"level_0":75,"index":255,"Rank":268,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":33.15,"STSB":""} -{"level_0":76,"index":256,"Rank":269,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":44.93,"STSB":""} -{"level_0":77,"index":258,"Rank":271,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":16.35,"STSB":""} -{"level_0":78,"index":260,"Rank":273,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":4.78,"STSB":""} -{"level_0":79,"index":261,"Rank":274,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":54.32,"STSB":""} -{"level_0":80,"index":263,"Rank":276,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":27.32,"STSB":""} -{"level_0":81,"index":264,"Rank":277,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":31.16,"STSB":""} -{"level_0":82,"index":270,"Rank":283,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":32.9,"STSB":""} -{"level_0":83,"index":271,"Rank":284,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":30.47,"STSB":""} 
-{"level_0":84,"index":272,"Rank":285,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":33.55,"STSB":""} -{"level_0":85,"index":275,"Rank":287,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":32.83,"STSB":""} -{"level_0":86,"index":278,"Rank":288,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AFQMC":"","ATEC":"","BQ":"","LCQMC":"","PAWSX":"","QBQTC":"","STS22 (zh)":63.24,"STSB":""} +{"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":47.05,"AFQMC":33.02,"AFQMC (cmn-Hans)":33.01,"ATEC":39.81,"ATEC (cmn-Hans)":39.8,"BQ":46.44,"BQ (cmn-Hans)":46.44,"LCQMC":75.95,"LCQMC (cmn-Hans)":75.95,"PAWSX":14.63,"PAWSX (cmn-Hans)":14.63,"QBQTC":29.77,"STSB":81.08,"STSB (cmn-Hans)":81.08} +{"Rank":2,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.9,"AFQMC":29.67,"AFQMC (cmn-Hans)":29.66,"ATEC":37.01,"ATEC (cmn-Hans)":37.01,"BQ":45.45,"BQ (cmn-Hans)":45.45,"LCQMC":74.15,"LCQMC (cmn-Hans)":74.15,"PAWSX":12.14,"PAWSX (cmn-Hans)":12.13,"QBQTC":28.81,"STSB":79.05,"STSB (cmn-Hans)":79.04} +{"Rank":3,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":43.1,"AFQMC":25.21,"AFQMC (cmn-Hans)":25.21,"ATEC":35.14,"ATEC (cmn-Hans)":35.14,"BQ":43.27,"BQ (cmn-Hans)":43.27,"LCQMC":72.7,"LCQMC (cmn-Hans)":72.7,"PAWSX":11.01,"PAWSX (cmn-Hans)":11.0,"QBQTC":30.25,"STSB":77.73,"STSB (cmn-Hans)":77.73} +{"Rank":4,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":5,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":6,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":7,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":8,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC 
(cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":9,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":10,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":11,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":12,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":13,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":14,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":15,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":16,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":59.11,"AFQMC (cmn-Hans)":null,"ATEC":58.19,"ATEC (cmn-Hans)":null,"BQ":71.07,"BQ (cmn-Hans)":null,"LCQMC":78.27,"LCQMC (cmn-Hans)":null,"PAWSX":44.98,"PAWSX (cmn-Hans)":null,"QBQTC":38.69,"STSB":82.8,"STSB (cmn-Hans)":null} +{"Rank":17,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":null,"AFQMC":58.47,"AFQMC (cmn-Hans)":null,"ATEC":55.46,"ATEC (cmn-Hans)":null,"BQ":77.59,"BQ (cmn-Hans)":null,"LCQMC":76.29,"LCQMC (cmn-Hans)":null,"PAWSX":50.22,"PAWSX (cmn-Hans)":null,"QBQTC":31.82,"STSB":81.37,"STSB (cmn-Hans)":null} +{"Rank":18,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":42.4,"AFQMC (cmn-Hans)":null,"ATEC":48.17,"ATEC (cmn-Hans)":null,"BQ":61.78,"BQ 
(cmn-Hans)":null,"LCQMC":74.45,"LCQMC (cmn-Hans)":null,"PAWSX":20.4,"PAWSX (cmn-Hans)":null,"QBQTC":36.22,"STSB":78.31,"STSB (cmn-Hans)":null} +{"Rank":19,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"AFQMC":43.06,"AFQMC (cmn-Hans)":null,"ATEC":48.29,"ATEC (cmn-Hans)":null,"BQ":60.53,"BQ (cmn-Hans)":null,"LCQMC":74.71,"LCQMC (cmn-Hans)":null,"PAWSX":16.64,"PAWSX (cmn-Hans)":null,"QBQTC":35.2,"STSB":78.41,"STSB (cmn-Hans)":null} +{"Rank":20,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":null,"AFQMC":44.36,"AFQMC (cmn-Hans)":null,"ATEC":49.54,"ATEC (cmn-Hans)":null,"BQ":62.94,"BQ (cmn-Hans)":null,"LCQMC":74.33,"LCQMC (cmn-Hans)":null,"PAWSX":33.92,"PAWSX (cmn-Hans)":null,"QBQTC":37.29,"STSB":78.7,"STSB (cmn-Hans)":null} +{"Rank":21,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":2270,"Memory Usage (GB, fp32)":8.46,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":22,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":null,"AFQMC":33.42,"AFQMC (cmn-Hans)":null,"ATEC":43.01,"ATEC (cmn-Hans)":null,"BQ":55.22,"BQ (cmn-Hans)":null,"LCQMC":72.19,"LCQMC (cmn-Hans)":null,"PAWSX":9.26,"PAWSX (cmn-Hans)":null,"QBQTC":35.29,"STSB":76.73,"STSB (cmn-Hans)":null} +{"Rank":23,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":24,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":25,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":26,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":27,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":28,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, 
fp32)":1.21,"Average":null,"AFQMC":24.51,"AFQMC (cmn-Hans)":null,"ATEC":32.45,"ATEC (cmn-Hans)":null,"BQ":44.22,"BQ (cmn-Hans)":null,"LCQMC":69.16,"LCQMC (cmn-Hans)":null,"PAWSX":14.55,"PAWSX (cmn-Hans)":null,"QBQTC":29.51,"STSB":79.45,"STSB (cmn-Hans)":null} +{"Rank":29,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":30,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":31,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":32,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":32.65,"ATEC":null,"ATEC (cmn-Hans)":37.34,"BQ":null,"BQ (cmn-Hans)":38.03,"LCQMC":null,"LCQMC (cmn-Hans)":71.38,"PAWSX":null,"PAWSX (cmn-Hans)":16.4,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":74.11} +{"Rank":33,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":34,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":35,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":36,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":37,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} 
+{"Rank":38,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":39,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":40,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":41,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":42,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":43,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":44,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":45,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":46,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":47,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX 
(cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":48,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":49,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":50,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":51,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":52,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":53,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":54,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":55,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":56,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":57,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC 
(cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":58,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":59,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":60,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":61,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":62,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":63,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":64,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":35.87,"AFQMC (cmn-Hans)":null,"ATEC":41.27,"ATEC (cmn-Hans)":null,"BQ":63.81,"BQ (cmn-Hans)":null,"LCQMC":74.88,"LCQMC (cmn-Hans)":null,"PAWSX":12.19,"PAWSX (cmn-Hans)":null,"QBQTC":32.07,"STSB":76.97,"STSB (cmn-Hans)":null} +{"Rank":65,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":36.53,"AFQMC (cmn-Hans)":null,"ATEC":41.8,"ATEC (cmn-Hans)":null,"BQ":65.2,"BQ (cmn-Hans)":null,"LCQMC":74.2,"LCQMC (cmn-Hans)":null,"PAWSX":15.95,"PAWSX (cmn-Hans)":null,"QBQTC":32.65,"STSB":74.16,"STSB (cmn-Hans)":null} +{"Rank":66,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":67,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ 
(cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":68,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":69,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":70,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":71,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":72,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":73,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":74,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":21.02,"ATEC":null,"ATEC (cmn-Hans)":26.61,"BQ":null,"BQ (cmn-Hans)":42.6,"LCQMC":null,"LCQMC (cmn-Hans)":52.19,"PAWSX":null,"PAWSX (cmn-Hans)":10.23,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":68.38} +{"Rank":75,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":7.94,"ATEC":null,"ATEC (cmn-Hans)":12.97,"BQ":null,"BQ (cmn-Hans)":23.31,"LCQMC":null,"LCQMC (cmn-Hans)":21.04,"PAWSX":null,"PAWSX (cmn-Hans)":7.31,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":36.66} +{"Rank":76,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":8.59,"ATEC":null,"ATEC (cmn-Hans)":13.52,"BQ":null,"BQ (cmn-Hans)":23.84,"LCQMC":null,"LCQMC (cmn-Hans)":23.85,"PAWSX":null,"PAWSX (cmn-Hans)":7.21,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":37.8} +{"Rank":77,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC 
(cmn-Hans)":8.01,"ATEC":null,"ATEC (cmn-Hans)":14.03,"BQ":null,"BQ (cmn-Hans)":21.39,"LCQMC":null,"LCQMC (cmn-Hans)":22.84,"PAWSX":null,"PAWSX (cmn-Hans)":6.44,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":37.7} +{"Rank":78,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":79,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":80,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":81,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":82,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":83,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":84,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":85,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":86,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":87,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":null,"AFQMC":null,"AFQMC 
(cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":88,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":14.3,"ATEC":null,"ATEC (cmn-Hans)":18.42,"BQ":null,"BQ (cmn-Hans)":38.53,"LCQMC":null,"LCQMC (cmn-Hans)":63.96,"PAWSX":null,"PAWSX (cmn-Hans)":10.13,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":78.91} +{"Rank":89,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":15.69,"ATEC":null,"ATEC (cmn-Hans)":20.27,"BQ":null,"BQ (cmn-Hans)":36.33,"LCQMC":null,"LCQMC (cmn-Hans)":63.3,"PAWSX":null,"PAWSX (cmn-Hans)":12.16,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":80.84} +{"Rank":90,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":91,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":92,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":93,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":94,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":95,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":96,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":null,"AFQMC":26.06,"AFQMC (cmn-Hans)":null,"ATEC":31.93,"ATEC (cmn-Hans)":null,"BQ":42.67,"BQ (cmn-Hans)":null,"LCQMC":70.16,"LCQMC (cmn-Hans)":null,"PAWSX":17.21,"PAWSX (cmn-Hans)":null,"QBQTC":24.62,"STSB":79.3,"STSB (cmn-Hans)":null} +{"Rank":97,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage 
(GB, fp32)":1.22,"Average":null,"AFQMC":22.24,"AFQMC (cmn-Hans)":null,"ATEC":30.84,"ATEC (cmn-Hans)":null,"BQ":43.33,"BQ (cmn-Hans)":null,"LCQMC":66.74,"LCQMC (cmn-Hans)":null,"PAWSX":12.31,"PAWSX (cmn-Hans)":null,"QBQTC":27.2,"STSB":73.22,"STSB (cmn-Hans)":null} +{"Rank":98,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":99,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":100,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":101,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":102,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":103,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":104,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":105,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":106,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} 
+{"Rank":107,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":23.88,"AFQMC (cmn-Hans)":null,"ATEC":29.25,"ATEC (cmn-Hans)":null,"BQ":45.33,"BQ (cmn-Hans)":null,"LCQMC":68.41,"LCQMC (cmn-Hans)":null,"PAWSX":16.55,"PAWSX (cmn-Hans)":null,"QBQTC":30.27,"STSB":70.61,"STSB (cmn-Hans)":null} +{"Rank":108,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":109,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":110,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} +{"Rank":111,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":null,"AFQMC":50.8,"AFQMC (cmn-Hans)":null,"ATEC":53.23,"ATEC (cmn-Hans)":null,"BQ":66.49,"BQ (cmn-Hans)":null,"LCQMC":76.6,"LCQMC (cmn-Hans)":null,"PAWSX":47.56,"PAWSX (cmn-Hans)":null,"QBQTC":39.96,"STSB":80.14,"STSB (cmn-Hans)":null} +{"Rank":112,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":null,"AFQMC":null,"AFQMC (cmn-Hans)":null,"ATEC":null,"ATEC (cmn-Hans)":null,"BQ":null,"BQ (cmn-Hans)":null,"LCQMC":null,"LCQMC (cmn-Hans)":null,"PAWSX":null,"PAWSX (cmn-Hans)":null,"QBQTC":null,"STSB":null,"STSB (cmn-Hans)":null} diff --git a/refresh.py b/refresh.py index 9249b2a089304dd34461d2ee9424fb3682c7b2ea..dc032c2fdacc77f5550629287bcd1f4795c17b52 100644 --- a/refresh.py +++ b/refresh.py @@ -1,6 +1,7 @@ -import json from __future__ import annotations +import json + import os import re from functools import reduce @@ -136,8 +137,8 @@ def add_lang(examples): return examples -def norm(names: str) -> set: - return set([name.split(" ")[0] for name in names]) +def norm(names: list[str]) -> list[str]: + return list(set([name.split(" ")[0] for name in names])) def add_task(examples): @@ -508,7 +509,7 @@ def get_mteb_data( df.drop(columns=["PawsX (fr)"], inplace=True) # Filter invalid columns - cols = [col for col in cols if col in base_columns + datasets or any([col.startswith(d) for d in datasets])] + cols = [col for col in cols if col in base_columns + datasets or any([col.split()[0] == d for d in datasets])] i = 0 for column in base_columns: if column in cols: @@ -523,11 +524,15 @@ def get_mteb_data( def find_tasks(df_columns: list[str], tasks: list[str]) -> list[str]: - # Some tasks have langs, but original tasks doesn't have languages, This function will find "Task (lang)" -> task + """ + Some tasks have langs, but original tasks doesn't have languages, This function will find task -> Task (lang) + """ used_columns = [] for task in tasks: for col_name in df_columns: - if col_name.startswith(task): + 
# some french datasets already have lang in their name + # if use starts with instead of split there can be duplicates + if col_name.split()[0] == task or col_name == task: used_columns.append(col_name) return used_columns @@ -670,8 +675,7 @@ def write_out_results(item: dict, item_name: str) -> None: print(f"Saving {main_folder} to {main_folder}/default.jsonl") os.makedirs(main_folder, exist_ok=True) - item.reset_index(inplace=True) - item.to_json(f"{main_folder}/default.jsonl", orient="records", lines=True) + item.reset_index(drop=True).to_json(f"{main_folder}/default.jsonl", orient="records", lines=True) elif isinstance(item, str): print(f"Saving {main_folder} to {main_folder}/default.txt")
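Why the switch from startswith to an exact match on the first whitespace-separated token, both in the column filter and in find_tasks: with startswith, a task name that is a prefix of another task also pulls in that other task's columns. Below is a minimal, self-contained sketch of the matching logic; the column names are illustrative examples, not values taken from the leaderboard data.

# Sketch of the token-based matching used in refresh.py's find_tasks (illustrative names).
def find_tasks(df_columns: list[str], tasks: list[str]) -> list[str]:
    used_columns = []
    for task in tasks:
        for col_name in df_columns:
            # exact match on the first token avoids prefix collisions
            if col_name.split()[0] == task or col_name == task:
                used_columns.append(col_name)
    return used_columns


columns = ["STS (fr)", "STSBenchmark", "STS"]
print([c for c in columns if c.startswith("STS")])  # ['STS (fr)', 'STSBenchmark', 'STS'] -- prefix match also grabs STSBenchmark
print(find_tasks(columns, ["STS"]))                 # ['STS (fr)', 'STS'] -- only the task and its language-suffixed column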
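The write_out_results change is also behavior-relevant: pandas' reset_index() without drop=True writes the old index out as an extra column in the JSONL, whereas drop=True discards it and returns a new frame that can be chained straight into to_json. A minimal pandas sketch of the difference, using a made-up toy DataFrame:

import pandas as pd

# Toy frame with a non-default index, standing in for the leaderboard table.
df = pd.DataFrame({"Model": ["a", "b"]}, index=[3, 7])

print(df.reset_index().to_json(orient="records", lines=True))
# {"index":3,"Model":"a"}
# {"index":7,"Model":"b"}   <- stray "index" column leaks into the output

print(df.reset_index(drop=True).to_json(orient="records", lines=True))
# {"Model":"a"}
# {"Model":"b"}             <- index discarded, matching the new behavior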